Commit b9fe45e6 authored by Lluis Gifre Renom

Merge branch 'feat/webui' into 'develop'

First version of the WebUI

See merge request teraflow-h2020/controller!73
parents 0d715111 7b2637c6
Related merge request: !54 Release 2.0.0
Showing changes with 3718 additions and 3 deletions
@@ -22,6 +22,7 @@ include:
   #- local: '/src/tester_integration/.gitlab-ci.yml'
   #- local: '/src/tester_functional/.gitlab-ci.yml'
   - local: '/src/automation/.gitlab-ci.yml'
+  - local: '/src/webui/.gitlab-ci.yml'
   # - local: '/src/l3_distributedattackdetector/.gitlab-ci.yml'
   # - local: '/src/l3_centralizedattackdetector/.gitlab-ci.yml'
   # - local: '/src/l3_attackmitigator/.gitlab-ci.yml'
@@ -10,7 +10,7 @@ REGISTRY_IMAGE=""
 #REGISTRY_IMAGE="http://my-container-registry.local/"
 # Set the list of components you want to build images for, and deploy.
-COMPONENTS="context device automation policy service compute monitoring dbscanserving opticalattackmitigator opticalcentralizedattackdetector"
+COMPONENTS="context device automation policy service compute monitoring dbscanserving opticalattackmitigator opticalcentralizedattackdetector webui"
 # Set the tag you want to use for your images.
 IMAGE_TAG="tf-dev"
...
@@ -7,7 +7,7 @@ pip install --upgrade pip setuptools wheel pip-tools pylint pytest pytest-benchm
 echo "" > requirements.in
 #TODO: include here your component
-COMPONENTS="compute context device service monitoring opticalcentralizedattackdetector opticalattackmitigator dbscanserving"
+COMPONENTS="compute context device service monitoring opticalcentralizedattackdetector opticalattackmitigator dbscanserving webui"
 # compiling dependencies from all components
 for component in $COMPONENTS
...
apiVersion: apps/v1
kind: Deployment
metadata:
  name: webuiservice
spec:
  selector:
    matchLabels:
      app: webuiservice
  template:
    metadata:
      labels:
        app: webuiservice
    spec:
      terminationGracePeriodSeconds: 5
      containers:
      - name: server
        image: registry.gitlab.com/teraflow-h2020/controller/webui:latest
        imagePullPolicy: Always
        ports:
        - containerPort: 8004 # TODO: define the real port
        env:
        - name: LOG_LEVEL
          value: "DEBUG"
        readinessProbe:
          httpGet:
            path: /healthz/ready
            port: 8004
          initialDelaySeconds: 5
          timeoutSeconds: 1
        livenessProbe:
          httpGet:
            path: /healthz/live
            port: 8004
          initialDelaySeconds: 5
          timeoutSeconds: 1
        resources:
          requests:
            cpu: 250m
            memory: 512Mi
          limits:
            cpu: 700m
            memory: 1024Mi
---
apiVersion: v1
kind: Service
metadata:
  name: webuiservice
spec:
  type: ClusterIP
  selector:
    app: webuiservice
  ports:
  - name: http
    port: 8004
    targetPort: 8004
---
# apiVersion: v1
# kind: Service
# metadata:
#   name: webuiservice-public
#   labels:
#     app: webuiservice
# spec:
#   type: NodePort
#   selector:
#     app: webuiservice
#   ports:
#   - name: http
#     protocol: TCP
#     port: 8004
#     targetPort: 8004
---
# this script opens the webui
WEBUI_PROTO=`kubectl get service/webuiservice -n tf-dev -o jsonpath='{.spec.ports[0].name}'`
WEBUI_IP=`kubectl get service/webuiservice -n tf-dev -o jsonpath='{.spec.clusterIP}'`
WEBUI_PORT=`kubectl get service/webuiservice -n tf-dev -o jsonpath='{.spec.ports[0].port}'`
URL=${WEBUI_PROTO}://${WEBUI_IP}:${WEBUI_PORT}
echo Opening web UI on URL ${URL}
# curl -kL ${URL}
python3 -m webbrowser ${URL}
\ No newline at end of file
@@ -59,3 +59,6 @@ coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
 coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
     compute/tests/test_unitary.py
+coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
+    webui/tests/test_unitary.py
#
# This file is autogenerated by pip-compile with python 3.9
# To update, run:
#
# pip-compile src/context/requirements.in
#
aniso8601==9.0.1
# via flask-restful
attrs==21.2.0
# via pytest
certifi==2021.5.30
# via requests
charset-normalizer==2.0.6
# via requests
click==8.0.1
# via flask
flask==2.0.1
# via flask-restful
flask-restful==0.3.9
# via -r src/context/requirements.in
grpcio==1.41.0
# via
# -r src/context/requirements.in
# grpcio-health-checking
grpcio-health-checking==1.41.0
# via -r src/context/requirements.in
idna==3.2
# via requests
iniconfig==1.1.1
# via pytest
itsdangerous==2.0.1
# via flask
jinja2==3.0.1
# via flask
markupsafe==2.0.1
# via jinja2
packaging==21.0
# via pytest
pluggy==1.0.0
# via pytest
prometheus-client==0.11.0
# via -r src/context/requirements.in
protobuf==3.18.0
# via grpcio-health-checking
py==1.10.0
# via pytest
py-cpuinfo==8.0.0
# via pytest-benchmark
pyparsing==2.4.7
# via packaging
pytest==6.2.5
# via
# -r src/context/requirements.in
# pytest-benchmark
pytest-benchmark==3.4.1
# via -r src/context/requirements.in
pytz==2021.1
# via flask-restful
redis==3.5.3
# via -r src/context/requirements.in
requests==2.26.0
# via -r src/context/requirements.in
six==1.16.0
# via
# flask-restful
# grpcio
toml==0.10.2
# via pytest
urllib3==1.26.7
# via requests
werkzeug==2.0.1
# via flask
@@ -22,7 +22,7 @@ class DeviceClient:
         self.stub = DeviceServiceStub(self.channel)
     def close(self):
-        if self.channel is not None: self.channel.close()
+        if(self.channel is not None): self.channel.close()
         self.channel = None
         self.stub = None
...
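The DeviceClient touched by this hunk is one of the existing gRPC clients that the new WebUI reuses (the webui Dockerfile below copies device/client into its image). The following is only a usage sketch of the close() pattern; the import path and constructor arguments are assumptions, not shown in this excerpt.

# Hypothetical usage sketch; only close() appears in the hunk above.
from device.client.DeviceClient import DeviceClient  # import path assumed

client = DeviceClient('deviceservice', 2020)  # constructor arguments assumed
try:
    stub = client.stub  # DeviceServiceStub, created in __init__
    # ... issue RPCs, e.g. stub.GetInitialConfig(device_id) ...
finally:
    client.close()  # safe to call twice: channel and stub are reset to None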
# for development purposes only
export CONTEXT_SERVICE_ADDRESS=`kubectl get service/contextservice -n tf-dev -o jsonpath='{.spec.clusterIP}'`
echo $CONTEXT_SERVICE_ADDRESS
export DEVICE_SERVICE_ADDRESS=`kubectl get service/deviceservice -n tf-dev -o jsonpath='{.spec.clusterIP}'`
echo $DEVICE_SERVICE_ADDRESS
export HOST="127.0.0.1"
export FLASK_ENV="development"
python -m webui.service
# build, tag and push the Docker image to the gitlab registry
build webui:
  variables:
    IMAGE_NAME: 'webui' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: build
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
  script:
    - docker build -t "$IMAGE_NAME:$IMAGE_TAG" -f ./src/$IMAGE_NAME/Dockerfile ./src/
    - docker tag "$IMAGE_NAME:$IMAGE_TAG" "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker push "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
  after_script:
    - docker images --filter="dangling=true" --quiet | xargs -r docker rmi
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
        - src/$IMAGE_NAME/**/*.{py,in,yml}
        - src/$IMAGE_NAME/Dockerfile
        - src/$IMAGE_NAME/tests/*.py
        - src/$IMAGE_NAME/tests/Dockerfile
        - manifests/$IMAGE_NAME.yaml
        - .gitlab-ci.yml

# apply unit test to the webui component
unit test webui:
  variables:
    IMAGE_NAME: 'webui' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: unit_test
  needs:
    - build webui
  before_script:
    - docker login -u "$CI_REGISTRY_USER" -p "$CI_REGISTRY_PASSWORD" $CI_REGISTRY
    - if docker network list | grep teraflowbridge; then echo "teraflowbridge is already created"; else docker network create -d bridge teraflowbridge; fi
    - if docker container ls | grep $IMAGE_NAME; then docker rm -f $IMAGE_NAME; else echo "$IMAGE_NAME image is not in the system"; fi
  script:
    - docker pull "$CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG"
    - docker run --name $IMAGE_NAME -d -p 8004:8004 -v "$PWD/src/$IMAGE_NAME/tests:/opt/results" --network=teraflowbridge --rm $CI_REGISTRY_IMAGE/$IMAGE_NAME:$IMAGE_TAG
    - sleep 5
    - docker ps -a
    - docker logs $IMAGE_NAME
    - docker exec -i $IMAGE_NAME bash -c "coverage run -m pytest --log-level=DEBUG --verbose ${IMAGE_NAME}/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}_report.xml; coverage xml -o /opt/results/${IMAGE_NAME}_coverage.xml; ls -la /opt/results; coverage report --include='${IMAGE_NAME}/*' --show-missing"
  coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/'
  after_script:
    - docker rm -f $IMAGE_NAME
    - docker network rm teraflowbridge
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
    - changes:
        - src/$IMAGE_NAME/**/*.{py,in,yml}
        - src/$IMAGE_NAME/Dockerfile
        - src/$IMAGE_NAME/tests/*.py
        - src/$IMAGE_NAME/tests/Dockerfile
        - manifests/$IMAGE_NAME.yaml
        - .gitlab-ci.yml
  artifacts:
    when: always
    reports:
      junit: src/$IMAGE_NAME/tests/${IMAGE_NAME}_report.xml
      cobertura: src/$IMAGE_NAME/tests/${IMAGE_NAME}_coverage.xml

# Deployment of the webui service in Kubernetes Cluster
deploy webui:
  variables:
    IMAGE_NAME: 'webui' # name of the microservice
    IMAGE_TAG: 'latest' # tag of the container image (production, development, etc)
  stage: deploy
  needs:
    - unit test webui
    # - integ_test execute
  script:
    - 'sed -i "s/$IMAGE_NAME:.*/$IMAGE_NAME:$IMAGE_TAG/" manifests/$IMAGE_NAME.yaml'
    - kubectl version
    - kubectl get all
    - kubectl apply -f "manifests/$IMAGE_NAME.yaml"
    - kubectl get all
  # environment:
  #   name: test
  #   url: https://example.com
  #   kubernetes:
  #     namespace: test
  rules:
    - if: '$CI_PIPELINE_SOURCE == "merge_request_event" && ($CI_MERGE_REQUEST_TARGET_BRANCH_NAME == "develop" || $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $CI_DEFAULT_BRANCH)'
      when: manual
    - if: '$CI_PIPELINE_SOURCE == "push" && $CI_COMMIT_BRANCH == "develop"'
      when: manual
\ No newline at end of file
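The coverage: key in the 'unit test webui' job above extracts the overall percentage from the 'coverage report' output printed inside the container. An illustrative check of what that regular expression captures; the sample totals line is made up.

import re

# Same pattern as the 'coverage:' key of the 'unit test webui' job.
pattern = re.compile(r'TOTAL\s+\d+\s+\d+\s+(\d+%)')

sample = 'TOTAL                        321     45    86%'  # made-up report line
match = pattern.search(sample)
print(match.group(1) if match else 'no match')  # prints: 86%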
import os
import logging
# General settings
LOG_LEVEL = logging.DEBUG
# Web UI service settings
WEBUI_SERVICE_PORT = 8004
# Prometheus settings
METRICS_PORT = 9192
SECRET_KEY = '>s&}24@{]]#k3&^5$f3#?6?h3{W@[}/7z}2pa]>{3&5%RP<)[('
HOST = '0.0.0.0' # accepts connections coming from any ADDRESS
DEBUG=False
CONTEXT_SERVICE_ADDRESS = os.environ.get('CONTEXTSERVICE_SERVICE_HOST', 'contextservice')
CONTEXT_SERVICE_PORT = 1010
DEVICE_SERVICE_ADDRESS = os.environ.get('DEVICESERVICE_SERVICE_HOST', 'deviceservice')
DEVICE_SERVICE_PORT = 2020
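The Kubernetes manifest above probes /healthz/ready and /healthz/live on port 8004, and webui/requirements.in (at the end of this commit) pulls in flask-healthz. The WebUI service code itself is collapsed in this view, so the following is only a minimal sketch of how such endpoints could be wired with flask-healthz; the module name, dotted paths and check bodies are assumptions.

# Hypothetical module, e.g. webui/service/healthz_checks.py (name assumed).
from flask import Flask
from flask_healthz import healthz, HealthError

BACKENDS_READY = True  # placeholder flag (assumption)

def liveness():
    pass  # raise HealthError('...') to report the process as not alive

def readiness():
    if not BACKENDS_READY:  # e.g. Context/Device gRPC channels not yet connected
        raise HealthError('WebUI backends are not ready')

def create_app():
    app = Flask(__name__)
    app.config['HEALTHZ'] = {
        'live': 'webui.service.healthz_checks.liveness',   # dotted paths, assumed
        'ready': 'webui.service.healthz_checks.readiness',
    }
    app.register_blueprint(healthz, url_prefix='/healthz')  # serves /healthz/live and /healthz/ready
    return app

if __name__ == '__main__':
    # HOST and WEBUI_SERVICE_PORT as defined in the settings module above.
    create_app().run(host='0.0.0.0', port=8004)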
FROM python:3-slim
# Ref: https://pythonspeed.com/articles/activate-virtualenv-dockerfile/
# Install dependencies
RUN apt-get --yes --quiet --quiet update && \
apt-get --yes --quiet --quiet install wget g++ && \
rm -rf /var/lib/apt/lists/*
# Set Python to show logs as they occur
ENV PYTHONUNBUFFERED=0
ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION='python'
# Download the gRPC health probe -- not needed here... health will be asserted using HTTP
# RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
# wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
# chmod +x /bin/grpc_health_probe
# creating a user for security reasons
RUN groupadd -r webui && useradd --no-log-init -r -m -g webui webui
USER webui
# set working directory
RUN mkdir -p /home/webui/teraflow
WORKDIR /home/webui/teraflow
# Get Python packages per module
ENV VIRTUAL_ENV=/home/webui/venv
RUN python3 -m venv ${VIRTUAL_ENV}
ENV PATH="${VIRTUAL_ENV}/bin:${PATH}"
COPY --chown=webui:webui webui/requirements.in /home/webui/teraflow/webui/requirements.in
RUN pip install --upgrade "pip<22" setuptools wheel pip-tools && pip-compile --output-file=webui/requirements.txt webui/requirements.in
RUN pip install -r webui/requirements.txt
# Add files into working directory
COPY --chown=webui:webui common/. common
COPY --chown=webui:webui context/__init__.py context/__init__.py
COPY --chown=webui:webui context/proto/. context/proto
COPY --chown=webui:webui context/client/. context/client
COPY --chown=webui:webui device/__init__.py device/__init__.py
COPY --chown=webui:webui device/proto/. device/proto
COPY --chown=webui:webui device/client/. device/client
COPY --chown=webui:webui webui/. webui
# Start webui service
ENTRYPOINT ["python", "-m", "webui.service"]
#!/bin/bash -eu
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#!/bin/bash -e
# Make folder containing the script the root folder for its execution
cd $(dirname $0)
rm -rf proto/*.py
rm -rf proto/__pycache__
touch proto/__init__.py
# building protos of services used
# python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto compute.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto device.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto service.proto
python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto monitoring.proto
# rm proto/compute_pb2_grpc.py
rm proto/context_pb2_grpc.py
rm proto/device_pb2_grpc.py
rm proto/service_pb2_grpc.py
rm proto/monitoring_pb2_grpc.py
# sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/compute_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/context_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/device_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/service_pb2.py
sed -i -E 's/(import\ .*)_pb2/from . \1_pb2/g' proto/monitoring_pb2.py
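The sed expressions above rewrite the absolute imports emitted by grpc_tools.protoc into relative imports, so that the generated modules resolve inside the proto package. For example, in device_pb2.py:

# emitted by grpc_tools.protoc (absolute import):
#   import context_pb2 as context__pb2
# after the sed rewrite (relative import, as seen in the generated file below):
from . import context_pb2 as context__pb2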
This diff is collapsed.
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: device.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import context_pb2 as context__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='device.proto',
package='device',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0c\x64\x65vice.proto\x12\x06\x64\x65vice\x1a\rcontext.proto2\xf0\x01\n\rDeviceService\x12\x31\n\tAddDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x37\n\x0f\x43onfigureDevice\x12\x0f.context.Device\x1a\x11.context.DeviceId\"\x00\x12\x33\n\x0c\x44\x65leteDevice\x12\x11.context.DeviceId\x1a\x0e.context.Empty\"\x00\x12>\n\x10GetInitialConfig\x12\x11.context.DeviceId\x1a\x15.context.DeviceConfig\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DEVICESERVICE = _descriptor.ServiceDescriptor(
name='DeviceService',
full_name='device.DeviceService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=40,
serialized_end=280,
methods=[
_descriptor.MethodDescriptor(
name='AddDevice',
full_name='device.DeviceService.AddDevice',
index=0,
containing_service=None,
input_type=context__pb2._DEVICE,
output_type=context__pb2._DEVICEID,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='ConfigureDevice',
full_name='device.DeviceService.ConfigureDevice',
index=1,
containing_service=None,
input_type=context__pb2._DEVICE,
output_type=context__pb2._DEVICEID,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteDevice',
full_name='device.DeviceService.DeleteDevice',
index=2,
containing_service=None,
input_type=context__pb2._DEVICEID,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetInitialConfig',
full_name='device.DeviceService.GetInitialConfig',
index=3,
containing_service=None,
input_type=context__pb2._DEVICEID,
output_type=context__pb2._DEVICECONFIG,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_DEVICESERVICE)
DESCRIPTOR.services_by_name['DeviceService'] = _DEVICESERVICE
# @@protoc_insertion_point(module_scope)
This diff is collapsed.
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: service.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import context_pb2 as context__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='service.proto',
package='service',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\rservice.proto\x12\x07service\x1a\rcontext.proto2\xfd\x01\n\x0eServiceService\x12\x37\n\rCreateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x37\n\rUpdateService\x12\x10.context.Service\x1a\x12.context.ServiceId\"\x00\x12\x35\n\rDeleteService\x12\x12.context.ServiceId\x1a\x0e.context.Empty\"\x00\x12\x42\n\x11GetConnectionList\x12\x12.context.ServiceId\x1a\x17.context.ConnectionList\"\x00\x62\x06proto3'
,
dependencies=[context__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_SERVICESERVICE = _descriptor.ServiceDescriptor(
name='ServiceService',
full_name='service.ServiceService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=42,
serialized_end=295,
methods=[
_descriptor.MethodDescriptor(
name='CreateService',
full_name='service.ServiceService.CreateService',
index=0,
containing_service=None,
input_type=context__pb2._SERVICE,
output_type=context__pb2._SERVICEID,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateService',
full_name='service.ServiceService.UpdateService',
index=1,
containing_service=None,
input_type=context__pb2._SERVICE,
output_type=context__pb2._SERVICEID,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteService',
full_name='service.ServiceService.DeleteService',
index=2,
containing_service=None,
input_type=context__pb2._SERVICEID,
output_type=context__pb2._EMPTY,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetConnectionList',
full_name='service.ServiceService.GetConnectionList',
index=3,
containing_service=None,
input_type=context__pb2._SERVICEID,
output_type=context__pb2._CONNECTIONLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_SERVICESERVICE)
DESCRIPTOR.services_by_name['ServiceService'] = _SERVICESERVICE
# @@protoc_insertion_point(module_scope)
flask
flask-wtf
flask-healthz
flask-unittest
grpcio
grpcio-health-checking
prometheus-client
pytest
pytest-benchmark
lorem-text
coverage
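webui/requirements.in pulls in pytest, coverage and flask-unittest for the unit tests the CI job executes (webui/tests/test_unitary.py, not shown in this excerpt). A minimal sketch of such a test against the health endpoints, reusing the hypothetical create_app factory sketched earlier; every name below is an assumption.

import pytest
from webui.service.healthz_checks import create_app  # hypothetical factory from the earlier sketch

@pytest.fixture
def client():
    app = create_app()
    app.testing = True
    with app.test_client() as test_client:
        yield test_client

def test_readiness(client):
    # same endpoint the Kubernetes readinessProbe polls on port 8004
    assert client.get('/healthz/ready').status_code == 200

def test_liveness(client):
    assert client.get('/healthz/live').status_code == 200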