Skip to content
Snippets Groups Projects
Commit 8cc9f9ab authored by Carlos Natalino Da Silva's avatar Carlos Natalino Da Silva
Browse files

Skeleton of the centralized cybersecurity component that is ready to be called

parent f2287700
No related branches found
No related tags found
1 merge request: !54 "Release 2.0.0"
Showing
with 2494 additions and 2 deletions
......@@ -12,6 +12,7 @@ stages:
include:
- local: '/manifests/.gitlab-ci.yml'
#- local: '/src/monitoring/.gitlab-ci.yml'
- local: '/src/centralizedcybersecurity/.gitlab-ci.yml'
- local: '/src/context/.gitlab-ci.yml'
- local: '/src/device/.gitlab-ci.yml'
- local: '/src/service/.gitlab-ci.yml'
......
---
# Deployment of the centralized cybersecurity gRPC service.
apiVersion: apps/v1
kind: Deployment
metadata:
  name: centralizedcybersecurityservice
spec:
  selector:
    matchLabels:
      app: centralizedcybersecurityservice
  template:
    metadata:
      labels:
        app: centralizedcybersecurityservice
    spec:
      terminationGracePeriodSeconds: 5
      containers:
        - name: server
          image: registry.gitlab.com/teraflow-h2020/controller/centralizedcybersecurity:latest
          imagePullPolicy: Always
          ports:
            # Must match GRPC_SERVICE_PORT in centralizedcybersecurity/Config.py.
            - containerPort: 10010
          env:
            - name: DB_ENGINE
              value: "redis"
            - name: REDIS_DATABASE_ID
              value: "0"
            - name: LOG_LEVEL
              value: "DEBUG"
          # Both probes use the gRPC health-checking protocol via the probe
          # binary installed in the image's Dockerfile.
          readinessProbe:
            exec:
              command: ["/bin/grpc_health_probe", "-addr=:10010"]
          livenessProbe:
            exec:
              command: ["/bin/grpc_health_probe", "-addr=:10010"]
          resources:
            requests:
              cpu: 250m
              memory: 512Mi
            limits:
              cpu: 700m
              memory: 1024Mi
---
# Cluster-internal endpoint used by the other controller components.
apiVersion: v1
kind: Service
metadata:
  name: centralizedcybersecurityservice
spec:
  type: ClusterIP
  selector:
    app: centralizedcybersecurityservice
  ports:
    - name: grpc
      port: 10010
      targetPort: 10010
---
# Externally reachable endpoint (NodePort chosen dynamically by Kubernetes),
# e.g. for the distributed cybersecurity component running outside the cluster.
apiVersion: v1
kind: Service
metadata:
  name: centralizedcybersecurityservice-public
  labels:
    app: centralizedcybersecurityservice
spec:
  type: NodePort
  selector:
    app: centralizedcybersecurityservice
  ports:
    - name: grpc
      protocol: TCP
      port: 10010
      targetPort: 10010
#!/bin/bash
# Run the aggregated coverage report and highlight (in color) the lines that
# belong to the centralizedcybersecurity module; the trailing "|$" alternative
# matches every line, so non-module lines are still printed, just unhighlighted.
./report_coverage_all.sh | grep --color -E -i "^centralizedcybersecurity/.*$|$"
#!/bin/bash
# Run the unitary tests of all modules while collecting code coverage.
cd $(dirname $0)/src
# NOTE(review): the two assignment pairs below are duplicated; in bash the
# later pair (~/projects/...) silently overrides the first — confirm which
# path is intended and drop the other.
RCFILE=~/teraflow/controller/coverage/.coveragerc
COVERAGEFILE=~/teraflow/controller/coverage/.coverage
RCFILE=~/projects/teraflow/controller/coverage/.coveragerc
COVERAGEFILE=~/projects/teraflow/controller/coverage/.coverage
# Run unitary tests and analyze coverage of code at same time
......@@ -13,6 +13,9 @@ coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
common/database/tests/test_unitary.py \
common/database/tests/test_engine_inmemory.py
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
centralizedcybersecurity/tests/test_unitary.py
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
context/tests/test_unitary.py
......
# Build, tag, and push the Docker images to the GitLab Docker registry
build centralizedcybersecurity:
  variables:
    IMAGE_NAME: 'centralizedcybersecurity'            # name of the microservice
    IMAGE_NAME_TEST: 'centralizedcybersecurity-test'  # name of the microservice
    IMAGE_TAG: 'latest'                               # tag of the container image (production, development, etc)
  stage: build
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    # Build from the src/ context so the image can copy the shared common/ code
    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
  rules:
    - changes:
        - src/$IMAGE_NAME/**
        - .gitlab-ci.yml
# Pull, execute, and run unitary tests for the Docker image from the GitLab registry
unit_test centralizedcybersecurity:
  variables:
    IMAGE_NAME: 'centralizedcybersecurity'            # name of the microservice
    IMAGE_NAME_TEST: 'centralizedcybersecurity-test'  # name of the microservice
    IMAGE_TAG: 'latest'                               # tag of the container image (production, development, etc)
  stage: unit_test
  needs:
    - build centralizedcybersecurity
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
  script:
    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    # Bugfix: run the image that was just pulled from the registry. The bare
    # local tag "$IMAGE_NAME:$IMAGE_TAG" only exists on a runner that happened
    # to execute the build job, so running it here failed on other runners.
    # NOTE(review): the service's gRPC port is 10010 (see Config.py); the
    # 3030:3030 mapping is preserved as-is but appears unused since the tests
    # run via "docker exec" inside the container — confirm and drop it.
    - docker run -d -p 3030:3030 --name $IMAGE_NAME --network=teraflowbridge "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker ps -a
    - sleep 5
    - docker ps -a
    - docker logs $IMAGE_NAME
    - docker exec -i $IMAGE_NAME bash -c "pytest --log-level=DEBUG --verbose $IMAGE_NAME/tests/test_unitary.py"
  after_script:
    # Always clean up the container, even when the tests fail
    - docker stop $IMAGE_NAME
    - docker rm $IMAGE_NAME
  rules:
    - changes:
        - src/$IMAGE_NAME/**
        - .gitlab-ci.yml
# Deployment of the service in Kubernetes Cluster
deploy centralizedcybersecurity:
  stage: deploy
  needs:
    - build centralizedcybersecurity
    - unit_test centralizedcybersecurity
    - dependencies all
    - integ_test execute
  script:
    - kubectl version
    - kubectl get all
    - kubectl apply -f "manifests/centralizedcybersecurityservice.yaml"
    # Delete running pods so the Deployment re-pulls the freshly pushed
    # :latest image (imagePullPolicy: Always in the manifest)
    - kubectl delete pods --selector app=centralizedcybersecurityservice
    - kubectl get all
import logging
# General settings
# Default log level; the Kubernetes manifest overrides this at runtime via
# the LOG_LEVEL environment variable (set to "DEBUG" there).
LOG_LEVEL = logging.WARNING
# gRPC settings
# TCP port the gRPC server binds to; must match containerPort and the
# grpc_health_probe address in manifests/centralizedcybersecurityservice.yaml.
GRPC_SERVICE_PORT = 10010
GRPC_MAX_WORKERS = 10  # thread-pool size for the gRPC server
GRPC_GRACE_PERIOD = 60  # seconds granted to in-flight RPCs on shutdown
# Prometheus settings
METRICS_PORT = 9192  # port where Prometheus metrics are to be exposed
FROM python:3-slim

# Install dependencies
RUN apt-get --yes --quiet --quiet update && \
    apt-get --yes --quiet --quiet install wget g++ && \
    rm -rf /var/lib/apt/lists/*

# Set Python to show logs as they occur. Any non-empty value of
# PYTHONUNBUFFERED enables unbuffered stdout/stderr, so the previous "0"
# actually enabled it too; use the conventional "1" to avoid confusion.
ENV PYTHONUNBUFFERED=1

# Download the gRPC health probe used by the Kubernetes readiness/liveness probes
RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
    chmod +x /bin/grpc_health_probe

# Get generic Python packages
RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools

# Set working directory
WORKDIR /var/teraflow

# Create module sub-folders
RUN mkdir -p /var/teraflow/centralizedcybersecurity

# Resolve and install the module's Python dependencies first so this layer is
# cached independently of source-code changes
COPY centralizedcybersecurity/requirements.in centralizedcybersecurity/requirements.in
RUN pip-compile --output-file=centralizedcybersecurity/requirements.txt centralizedcybersecurity/requirements.in
RUN python3 -m pip install -r centralizedcybersecurity/requirements.txt

# Add files into working directory
COPY common/. common
COPY centralizedcybersecurity/. centralizedcybersecurity

# Start centralizedcybersecurity service
ENTRYPOINT ["python", "-m", "centralizedcybersecurity.service"]
import grpc, logging
from common.tools.client.RetryDecorator import retry, delay_exponential
from centralizedcybersecurity.proto.context_pb2 import Empty
from centralizedcybersecurity.proto.service_pb2 import Service
from centralizedcybersecurity.proto.monitoring_pb2 import KpiList
from centralizedcybersecurity.proto.centralized_cybersecurity_pb2_grpc import CentralizedCyberSecurityServiceStub

LOGGER = logging.getLogger(__name__)
MAX_RETRIES = 15
DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)

class CentralizedCybersecurityClient:
    """Client wrapper around the CentralizedCyberSecurity gRPC stub.

    An insecure channel is opened on construction; each RPC is wrapped in a
    retry decorator that re-invokes connect() between attempts.
    """

    def __init__(self, address, port):
        # The endpoint string is kept so connect() can be re-run by the
        # retry decorator after a transport failure.
        self.endpoint = f'{address}:{port}'
        LOGGER.debug(f'Creating channel to {self.endpoint}...')
        self.channel = None
        self.stub = None
        self.connect()
        LOGGER.debug('Channel created')

    def connect(self):
        # (Re)open the channel and rebuild the stub on top of it.
        self.channel = grpc.insecure_channel(self.endpoint)
        self.stub = CentralizedCyberSecurityServiceStub(self.channel)

    def close(self):
        # Idempotent: safe to call when no channel is currently open.
        if self.channel is not None:
            self.channel.close()
        self.channel = None
        self.stub = None

    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
    def NotifyServiceUpdate(self, request : Service) -> Empty:
        LOGGER.debug(f'NotifyServiceUpdate request: {request}')
        reply = self.stub.NotifyServiceUpdate(request)
        LOGGER.debug(f'NotifyServiceUpdate result: {reply}')
        return reply

    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
    def DetectAttack(self, request : Empty) -> Empty:
        LOGGER.debug(f'DetectAttack request: {request}')
        reply = self.stub.DetectAttack(request)
        LOGGER.debug(f'DetectAttack result: {reply}')
        return reply

    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
    def ReportSummarizedKpi(self, request : KpiList) -> Empty:
        LOGGER.debug(f'ReportSummarizedKpi request: {request}')
        reply = self.stub.ReportSummarizedKpi(request)
        LOGGER.debug(f'ReportSummarizedKpi result: {reply}')
        return reply

    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
    def ReportKpi(self, request : KpiList) -> Empty:
        LOGGER.debug(f'ReportKpi request: {request}')
        reply = self.stub.ReportKpi(request)
        LOGGER.debug(f'ReportKpi result: {reply}')
        return reply
#!/bin/bash -eu
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -e
# Regenerate the Python protobuf/gRPC bindings for this module.
# Make folder containing the script the root folder for its execution.
# Bugfix: quote the command substitution so the script works when the
# repository path contains spaces.
cd "$(dirname "$0")"

# Start from a clean proto package
rm -rf proto/*.py
rm -rf proto/__pycache__
touch proto/__init__.py

# Build the protos of the services this module depends on. Only their message
# bindings are needed, so the generated *_pb2_grpc.py stubs are deleted.
for DEP in context service monitoring; do
    python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto "${DEP}.proto"
    rm "proto/${DEP}_pb2_grpc.py"
    # Rewrite the absolute imports emitted by protoc into relative ones so the
    # generated code works as a sub-package.
    sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' "proto/${DEP}_pb2.py"
done

# Build the protos of this service itself (messages and gRPC stubs are kept)
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto centralized_cybersecurity.proto
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/centralized_cybersecurity_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/centralized_cybersecurity_pb2_grpc.py
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: centralized_cybersecurity.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import context_pb2 as context__pb2
from . import service_pb2 as service__pb2
from . import monitoring_pb2 as monitoring__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='centralized_cybersecurity.proto',
package='centralized_cybersecurity',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1f\x63\x65ntralized_cybersecurity.proto\x12\x19\x63\x65ntralized_cybersecurity\x1a\rcontext.proto\x1a\rservice.proto\x1a\x10monitoring.proto2\x80\x02\n\x1f\x43\x65ntralizedCyberSecurityService\x12\x39\n\x13NotifyServiceUpdate\x12\x10.service.Service\x1a\x0e.context.Empty\"\x00\x12\x30\n\x0c\x44\x65tectAttack\x12\x0e.context.Empty\x1a\x0e.context.Empty\"\x00\x12<\n\x13ReportSummarizedKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x12\x32\n\tReportKpi\x12\x13.monitoring.KpiList\x1a\x0e.context.Empty\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,service__pb2.DESCRIPTOR,monitoring__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_CENTRALIZEDCYBERSECURITYSERVICE = _descriptor.ServiceDescriptor(
name='CentralizedCyberSecurityService',
full_name='centralized_cybersecurity.CentralizedCyberSecurityService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=111,
serialized_end=367,
methods=[
_descriptor.MethodDescriptor(
name='NotifyServiceUpdate',
full_name='centralized_cybersecurity.CentralizedCyberSecurityService.NotifyServiceUpdate',
index=0,
containing_service=None,
input_type=service__pb2._SERVICE,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DetectAttack',
full_name='centralized_cybersecurity.CentralizedCyberSecurityService.DetectAttack',
index=1,
containing_service=None,
input_type=context__pb2._EMPTY,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ReportSummarizedKpi',
full_name='centralized_cybersecurity.CentralizedCyberSecurityService.ReportSummarizedKpi',
index=2,
containing_service=None,
input_type=monitoring__pb2._KPILIST,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ReportKpi',
full_name='centralized_cybersecurity.CentralizedCyberSecurityService.ReportKpi',
index=3,
containing_service=None,
input_type=monitoring__pb2._KPILIST,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_CENTRALIZEDCYBERSECURITYSERVICE)
DESCRIPTOR.services_by_name['CentralizedCyberSecurityService'] = _CENTRALIZEDCYBERSECURITYSERVICE
# @@protoc_insertion_point(module_scope)
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import context_pb2 as context__pb2
from . import monitoring_pb2 as monitoring__pb2
from . import service_pb2 as service__pb2
class CentralizedCyberSecurityServiceStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.NotifyServiceUpdate = channel.unary_unary(
'/centralized_cybersecurity.CentralizedCyberSecurityService/NotifyServiceUpdate',
request_serializer=service__pb2.Service.SerializeToString,
response_deserializer=context__pb2.Empty.FromString,
)
self.DetectAttack = channel.unary_unary(
'/centralized_cybersecurity.CentralizedCyberSecurityService/DetectAttack',
request_serializer=context__pb2.Empty.SerializeToString,
response_deserializer=context__pb2.Empty.FromString,
)
self.ReportSummarizedKpi = channel.unary_unary(
'/centralized_cybersecurity.CentralizedCyberSecurityService/ReportSummarizedKpi',
request_serializer=monitoring__pb2.KpiList.SerializeToString,
response_deserializer=context__pb2.Empty.FromString,
)
self.ReportKpi = channel.unary_unary(
'/centralized_cybersecurity.CentralizedCyberSecurityService/ReportKpi',
request_serializer=monitoring__pb2.KpiList.SerializeToString,
response_deserializer=context__pb2.Empty.FromString,
)
class CentralizedCyberSecurityServiceServicer(object):
"""Missing associated documentation comment in .proto file."""
def NotifyServiceUpdate(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def DetectAttack(self, request, context):
"""rpc that triggers the attack detection loop
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReportSummarizedKpi(self, request, context):
"""rpc called by the distributed component to report KPIs
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ReportKpi(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_CentralizedCyberSecurityServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'NotifyServiceUpdate': grpc.unary_unary_rpc_method_handler(
servicer.NotifyServiceUpdate,
request_deserializer=service__pb2.Service.FromString,
response_serializer=context__pb2.Empty.SerializeToString,
),
'DetectAttack': grpc.unary_unary_rpc_method_handler(
servicer.DetectAttack,
request_deserializer=context__pb2.Empty.FromString,
response_serializer=context__pb2.Empty.SerializeToString,
),
'ReportSummarizedKpi': grpc.unary_unary_rpc_method_handler(
servicer.ReportSummarizedKpi,
request_deserializer=monitoring__pb2.KpiList.FromString,
response_serializer=context__pb2.Empty.SerializeToString,
),
'ReportKpi': grpc.unary_unary_rpc_method_handler(
servicer.ReportKpi,
request_deserializer=monitoring__pb2.KpiList.FromString,
response_serializer=context__pb2.Empty.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'centralized_cybersecurity.CentralizedCyberSecurityService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class CentralizedCyberSecurityService(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def NotifyServiceUpdate(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/centralized_cybersecurity.CentralizedCyberSecurityService/NotifyServiceUpdate',
service__pb2.Service.SerializeToString,
context__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def DetectAttack(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/centralized_cybersecurity.CentralizedCyberSecurityService/DetectAttack',
context__pb2.Empty.SerializeToString,
context__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ReportSummarizedKpi(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/centralized_cybersecurity.CentralizedCyberSecurityService/ReportSummarizedKpi',
monitoring__pb2.KpiList.SerializeToString,
context__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def ReportKpi(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/centralized_cybersecurity.CentralizedCyberSecurityService/ReportKpi',
monitoring__pb2.KpiList.SerializeToString,
context__pb2.Empty.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
This diff is collapsed.
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: monitoring.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import context_pb2 as context__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='monitoring.proto',
package='monitoring',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x10monitoring.proto\x12\nmonitoring\x1a\rcontext.proto\"|\n\x03Kpi\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12\x11\n\ttimestamp\x18\x02 \x01(\t\x12\x16\n\x0ekpiDescription\x18\x03 \x01(\t\x12\'\n\tkpi_value\x18\x04 \x01(\x0b\x32\x14.monitoring.KpiValue\"&\n\x05KpiId\x12\x1d\n\x06kpi_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"T\n\tKpiDevice\x12!\n\x06kpi_id\x18\x01 \x01(\x0b\x32\x11.monitoring.KpiId\x12$\n\tdevice_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\"+\n\x07KpiList\x12 \n\x07kpiList\x18\x01 \x03(\x0b\x32\x0f.monitoring.Kpi\"M\n\x08KpiValue\x12\x10\n\x06intVal\x18\x01 \x01(\rH\x00\x12\x13\n\tstringVal\x18\x02 \x01(\tH\x00\x12\x11\n\x07\x62oolVal\x18\x03 \x01(\x08H\x00\x42\x07\n\x05value2\xeb\x01\n\x11MonitoringService\x12/\n\nIncludeKpi\x12\x0f.monitoring.Kpi\x1a\x0e.context.Empty\"\x00\x12\x35\n\nMonitorKpi\x12\x15.monitoring.KpiDevice\x1a\x0e.context.Empty\"\x00\x12\x37\n\rGetStream_kpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x30\x01\x12\x35\n\rGetInstantKpi\x12\x11.monitoring.KpiId\x1a\x0f.monitoring.Kpi\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,])
_KPI = _descriptor.Descriptor(
name='Kpi',
full_name='monitoring.Kpi',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='kpi_id', full_name='monitoring.Kpi.kpi_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='monitoring.Kpi.timestamp', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kpiDescription', full_name='monitoring.Kpi.kpiDescription', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kpi_value', full_name='monitoring.Kpi.kpi_value', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=47,
serialized_end=171,
)
_KPIID = _descriptor.Descriptor(
name='KpiId',
full_name='monitoring.KpiId',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='kpi_id', full_name='monitoring.KpiId.kpi_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=173,
serialized_end=211,
)
_KPIDEVICE = _descriptor.Descriptor(
name='KpiDevice',
full_name='monitoring.KpiDevice',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='kpi_id', full_name='monitoring.KpiDevice.kpi_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='device_id', full_name='monitoring.KpiDevice.device_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=213,
serialized_end=297,
)
_KPILIST = _descriptor.Descriptor(
name='KpiList',
full_name='monitoring.KpiList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='kpiList', full_name='monitoring.KpiList.kpiList', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=299,
serialized_end=342,
)
_KPIVALUE = _descriptor.Descriptor(
name='KpiValue',
full_name='monitoring.KpiValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='intVal', full_name='monitoring.KpiValue.intVal', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stringVal', full_name='monitoring.KpiValue.stringVal', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='boolVal', full_name='monitoring.KpiValue.boolVal', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='monitoring.KpiValue.value',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=344,
serialized_end=421,
)
_KPI.fields_by_name['kpi_id'].message_type = _KPIID
_KPI.fields_by_name['kpi_value'].message_type = _KPIVALUE
_KPIID.fields_by_name['kpi_id'].message_type = context__pb2._UUID
_KPIDEVICE.fields_by_name['kpi_id'].message_type = _KPIID
_KPIDEVICE.fields_by_name['device_id'].message_type = context__pb2._DEVICEID
_KPILIST.fields_by_name['kpiList'].message_type = _KPI
_KPIVALUE.oneofs_by_name['value'].fields.append(
_KPIVALUE.fields_by_name['intVal'])
_KPIVALUE.fields_by_name['intVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
_KPIVALUE.oneofs_by_name['value'].fields.append(
_KPIVALUE.fields_by_name['stringVal'])
_KPIVALUE.fields_by_name['stringVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
_KPIVALUE.oneofs_by_name['value'].fields.append(
_KPIVALUE.fields_by_name['boolVal'])
_KPIVALUE.fields_by_name['boolVal'].containing_oneof = _KPIVALUE.oneofs_by_name['value']
DESCRIPTOR.message_types_by_name['Kpi'] = _KPI
DESCRIPTOR.message_types_by_name['KpiId'] = _KPIID
DESCRIPTOR.message_types_by_name['KpiDevice'] = _KPIDEVICE
DESCRIPTOR.message_types_by_name['KpiList'] = _KPILIST
DESCRIPTOR.message_types_by_name['KpiValue'] = _KPIVALUE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Kpi = _reflection.GeneratedProtocolMessageType('Kpi', (_message.Message,), {
'DESCRIPTOR' : _KPI,
'__module__' : 'monitoring_pb2'
# @@protoc_insertion_point(class_scope:monitoring.Kpi)
})
_sym_db.RegisterMessage(Kpi)
KpiId = _reflection.GeneratedProtocolMessageType('KpiId', (_message.Message,), {
'DESCRIPTOR' : _KPIID,
'__module__' : 'monitoring_pb2'
# @@protoc_insertion_point(class_scope:monitoring.KpiId)
})
_sym_db.RegisterMessage(KpiId)
KpiDevice = _reflection.GeneratedProtocolMessageType('KpiDevice', (_message.Message,), {
'DESCRIPTOR' : _KPIDEVICE,
'__module__' : 'monitoring_pb2'
# @@protoc_insertion_point(class_scope:monitoring.KpiDevice)
})
_sym_db.RegisterMessage(KpiDevice)
KpiList = _reflection.GeneratedProtocolMessageType('KpiList', (_message.Message,), {
'DESCRIPTOR' : _KPILIST,
'__module__' : 'monitoring_pb2'
# @@protoc_insertion_point(class_scope:monitoring.KpiList)
})
_sym_db.RegisterMessage(KpiList)
KpiValue = _reflection.GeneratedProtocolMessageType('KpiValue', (_message.Message,), {
'DESCRIPTOR' : _KPIVALUE,
'__module__' : 'monitoring_pb2'
# @@protoc_insertion_point(class_scope:monitoring.KpiValue)
})
_sym_db.RegisterMessage(KpiValue)
_MONITORINGSERVICE = _descriptor.ServiceDescriptor(
name='MonitoringService',
full_name='monitoring.MonitoringService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=424,
serialized_end=659,
methods=[
_descriptor.MethodDescriptor(
name='IncludeKpi',
full_name='monitoring.MonitoringService.IncludeKpi',
index=0,
containing_service=None,
input_type=_KPI,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='MonitorKpi',
full_name='monitoring.MonitoringService.MonitorKpi',
index=1,
containing_service=None,
input_type=_KPIDEVICE,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetStream_kpi',
full_name='monitoring.MonitoringService.GetStream_kpi',
index=2,
containing_service=None,
input_type=_KPIID,
output_type=_KPI,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetInstantKpi',
full_name='monitoring.MonitoringService.GetInstantKpi',
index=3,
containing_service=None,
input_type=_KPIID,
output_type=_KPI,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_MONITORINGSERVICE)
DESCRIPTOR.services_by_name['MonitoringService'] = _MONITORINGSERVICE
# @@protoc_insertion_point(module_scope)
This diff is collapsed.
grpcio-health-checking
grpcio
prometheus-client
pytest
pytest-benchmark
redis
#
# This file is autogenerated by pip-compile with python 3.9
# To update, run:
#
# pip-compile
#
attrs==21.2.0
# via pytest
grpcio==1.41.0
# via
# -r requirements.in
# grpcio-health-checking
grpcio-health-checking==1.41.0
# via -r requirements.in
iniconfig==1.1.1
# via pytest
packaging==21.0
# via pytest
pluggy==1.0.0
# via pytest
prometheus-client==0.11.0
# via -r requirements.in
protobuf==3.18.0
# via grpcio-health-checking
py==1.10.0
# via pytest
py-cpuinfo==8.0.0
# via pytest-benchmark
pyparsing==2.4.7
# via packaging
pytest==6.2.5
# via
# -r requirements.in
# pytest-benchmark
pytest-benchmark==3.4.1
# via -r requirements.in
redis==3.5.3
# via -r requirements.in
six==1.16.0
# via grpcio
toml==0.10.2
# via pytest
import grpc
import logging
from concurrent import futures
from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
from grpc_health.v1.health_pb2 import HealthCheckResponse
from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
from centralizedcybersecurity.proto.centralized_cybersecurity_pb2_grpc import add_CentralizedCyberSecurityServiceServicer_to_server
from centralizedcybersecurity.service.CentralizedCybersecurityServiceServicerImpl import CentralizedCybersecurityServiceServicerImpl
from centralizedcybersecurity.Config import GRPC_SERVICE_PORT, GRPC_MAX_WORKERS, GRPC_GRACE_PERIOD

BIND_ADDRESS = '0.0.0.0'
LOGGER = logging.getLogger(__name__)

class CentralizedCybersecurityService:
    """gRPC server hosting the CentralizedCyberSecurity servicer together
    with the standard gRPC health-checking service."""

    def __init__(self, database, address=BIND_ADDRESS, port=GRPC_SERVICE_PORT, max_workers=GRPC_MAX_WORKERS,
                 grace_period=GRPC_GRACE_PERIOD):
        self.database = database
        self.address = address
        self.port = port
        self.endpoint = None  # resolved in start() once the port is bound
        self.max_workers = max_workers
        self.grace_period = grace_period  # seconds granted to in-flight RPCs on stop()
        self.service_servicer = None
        self.health_servicer = None
        self.pool = None
        self.server = None

    def start(self):
        """Create the gRPC server, register the servicers, bind the port and
        start serving; marks the health service as SERVING once up."""
        self.endpoint = '{}:{}'.format(self.address, self.port)
        LOGGER.debug('Starting Service (tentative endpoint: {}, max_workers: {})...'.format(
            self.endpoint, self.max_workers))
        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
        self.server = grpc.server(self.pool)  # , interceptors=(tracer_interceptor,))
        # Bugfix: store the servicer in the attribute declared in __init__
        # (it was previously assigned to an undeclared
        # 'centralized_cybersecurity_servicer' attribute, leaving
        # self.service_servicer always None).
        self.service_servicer = CentralizedCybersecurityServiceServicerImpl(self.database)
        # Backward-compatible alias for code that used the longer name.
        self.centralized_cybersecurity_servicer = self.service_servicer
        add_CentralizedCyberSecurityServiceServicer_to_server(self.service_servicer, self.server)
        self.health_servicer = HealthServicer(
            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
        add_HealthServicer_to_server(self.health_servicer, self.server)
        # add_insecure_port returns the port actually bound (relevant when
        # port 0 was requested); rebuild the endpoint from it.
        port = self.server.add_insecure_port(self.endpoint)
        self.endpoint = '{}:{}'.format(self.address, port)
        LOGGER.info('Listening on {}...'.format(self.endpoint))
        self.server.start()
        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING)  # pylint: disable=maybe-no-member
        LOGGER.debug('Service started')

    def stop(self):
        """Gracefully stop the server: flip health status first so load
        balancers drain, then stop with the configured grace period."""
        LOGGER.debug('Stopping service (grace period {} seconds)...'.format(self.grace_period))
        self.health_servicer.enter_graceful_shutdown()
        self.server.stop(self.grace_period)
        LOGGER.debug('Service stopped')
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment