Commit a47888b0 authored by Luis de la Cal

Changes related to CAD scalability experiments:

- Asynchronous generation of the scalability CSV
- Fixed the script that retrieves the scalability CSV from the Docker container
- Updated the CSV inside the Docker container with the local file
parent 1108d8b5
cad.txt 0 → 100644
Source diff could not be displayed: it is too large.
pod=$(kubectl get pods -n "tfs" -l app=l3-centralizedattackdetectorservice | sed -n '2p' | cut -d " " -f1)
while true; do kubectl -n "tfs" cp $pod:prediction_accuracy.txt ./prediction_accuracy.txt; clear; cat prediction_accuracy.txt | tail -n 10; sleep 1; done
\ No newline at end of file
while true; do kubectl -n "tfs" cp $pod:prediction_accuracy.txt ./prediction_accuracy.txt; cat prediction_accuracy.txt | tail -n 10; sleep 1; done
\ No newline at end of file
deployment_name=l3-centralizedattackdetectorservice
pod_name=$(kubectl get pods -n "tfs" -l app=$deployment_name | sed -n "2p" | cut -d " " -f1)
container_name=$(kubectl get pods $pod_name -n tfs -o jsonpath="{.spec.containers[1].name}")
echo "pod_name: $pod_name"
echo "container_name: $container_name"
echo "deployment_name: $deployment_name"
sleep 3
while true; do kubectl -n "tfs" cp $pod_name:prediction_accuracy.txt ./prediction_accuracy.txt -c $container_name; clear; cat prediction_accuracy.txt | tail -n 10; sleep 1; done
\ No newline at end of file
deployment_name=l3-centralizedattackdetectorservice
pod_name=$(kubectl get pods -n "tfs" -l app=$deployment_name | sed -n "2p" | cut -d " " -f1)
container_name=$(kubectl get pods $pod_name -n tfs -o jsonpath="{.spec.containers[1].name}")
echo "pod_name: $pod_name"
echo "container_name: $container_name"
echo "deployment_name: $deployment_name"
sleep 3
while true; do kubectl -n "tfs" cp $pod_name:scalability_accuracy.csv ./scalability_accuracy.csv -c $container_name; cat scalability_accuracy.csv | tail -n 10; sleep 1; done
\ No newline at end of file
......@@ -68,6 +68,8 @@ spec:
- name: grpc
port: 10002
targetPort: 10002
strategy:
type: Recreate
---
apiVersion: autoscaling/v2
......
......@@ -68,6 +68,8 @@ spec:
- name: grpc
port: 10001
targetPort: 10001
strategy:
type: Recreate
---
apiVersion: autoscaling/v2
......
Source diff could not be displayed: it is too large.
......@@ -5,8 +5,4 @@ REMOTE_HOST="192.168.165.11"
DEST_DIR="/home/ubuntu/TeraflowDockerDistributedScalability/l3_distributedattackdetector/proto"
# Copy the files to the remote host
sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_centralizedattackdetector_pb2.py "$REMOTE_HOST:$DEST_DIR"
sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_centralizedattackdetector_pb2_grpc.py "$REMOTE_HOST:$DEST_DIR"
sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_attackmitigator_pb2.py "$REMOTE_HOST:$DEST_DIR"
sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_attackmitigator_pb2_grpc.py "$REMOTE_HOST:$DEST_DIR"
\ No newline at end of file
sshpass -p "ubuntu" scp -r /home/ubuntu/tfs-ctrl-new/proto/src/python/ "$REMOTE_HOST:$DEST_DIR"
\ No newline at end of file
TIME_CONS OVERALL_ACCURACY CRYPTO_ACCURACY TOTAL_PREDICTIONS TOTAL_POSITIVES F_POSITIVES T_NEGATIVES F_NEGATIVES CONFIDENCE TIMESTAMP TIME_TO_STABILIZE
60 0.0 0.0 0.0 0.0 0.0 0.0 0.0 0.0 |18/04/2023 09:11:32| 2
......@@ -19,6 +19,9 @@ RUN apt-get --yes --quiet --quiet update && \
apt-get --yes --quiet --quiet install wget g++ && \
rm -rf /var/lib/apt/lists/*
RUN apt-get update && apt-get install -y tar
# Set Python to show logs as they occur
ENV PYTHONUNBUFFERED=0
......
......@@ -36,6 +36,7 @@ RUN python3 -m pip install --upgrade pip-tools
# Note: this step enables sharing the previous Docker build steps among all the Python components
WORKDIR /var/teraflow
COPY common_requirements.in common_requirements.in
COPY scalability_accuracy.csv scalability_accuracy.csv
RUN pip-compile --quiet --output-file=common_requirements.txt common_requirements.in
RUN python3 -m pip install -r common_requirements.txt
......
......@@ -21,7 +21,10 @@ import numpy as np
import onnxruntime as rt
import logging
import time
import csv
from multiprocessing import Process
from multiprocessing import Value
from common.proto.l3_centralizedattackdetector_pb2 import Empty, AutoFeatures
from common.proto.l3_centralizedattackdetector_pb2_grpc import L3CentralizedattackdetectorServicer
......@@ -50,7 +53,7 @@ current_dir = os.path.dirname(os.path.abspath(__file__))
DEMO_MODE = True
ATTACK_IPS = ["37.187.95.110", "91.121.140.167", "94.23.23.52", "94.23.247.226", "149.202.83.171"]
TIME_TO_STABILIZE = 3 # minutes
TIME_TO_STABILIZE = 2 # minutes
TIME_START = time.time()
MAX_CONNECTION_TIME = 60
......@@ -182,11 +185,21 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
# List of attack connections
self.attack_connections = []
# Accuracy metrics
self.correct_attack_conns = 0
self.correct_predictions = 0
self.total_predictions = 0
self.false_positives = 0
self.false_negatives = 0
self.attack_connections_len = Value('f', 0) # Must use multiprocessing.Value to share values between processes
self.total_predictions = Value('f', 0)
self.false_positives = Value('f', 0)
self.false_negatives = Value('f', 0)
self.overall_detection_acc = Value('f', 0)
self.cryptomining_attack_detection_acc = Value('f', 0)
self.confidence = Value('f', 0)
self.calculated_csv = False
# Start process to generate the accuracy scalability CSV asynchronously
p = Process(target=self.generate_accuracy_scalability_csv)
p.start()
"""
Create a monitored KPI for a specific service and add it to the Monitoring Client
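For context on the hunk above: the counters are created as multiprocessing.Value objects so that both the gRPC servicer process and the background CSV writer started with Process see the same numbers. A minimal, standalone sketch of that pattern follows; the names worker, hits and done and the fixed iteration count are illustrative assumptions, not part of the commit.

import time
from multiprocessing import Process, Value

def worker(hits, done):
    # Child process: poll the shared counter until the parent signals completion
    while not done.value:
        print("hits so far:", hits.value)
        time.sleep(1)

if __name__ == "__main__":
    hits = Value('f', 0)   # 'f' -> shared float, the typecode used for the servicer counters
    done = Value('b', 0)   # 'b' -> shared flag, only needed in this sketch
    p = Process(target=worker, args=(hits, done))
    p.start()
    for _ in range(5):
        hits.value += 1    # updates made by the parent are visible to the child
        time.sleep(1)
    done.value = 1
    p.join()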
......@@ -496,7 +509,8 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
LOGGER.debug("cryptomining_detector_output: {}".format(cryptomining_detector_output))
if DEMO_MODE:
self.analyze_prediction_accuracy(cryptomining_detector_output["confidence"])
self.confidence.value = cryptomining_detector_output["confidence"]
self.analyze_prediction_accuracy()
connection_info = ConnectionInfo(
request.connection_metadata.ip_o,
......@@ -539,9 +553,9 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
self.correct_predictions += 1
else:
LOGGER.debug("False positive: {}".format(connection_info))
self.false_positives += 1
self.false_positives.value = self.false_positives.value + 1
self.total_predictions += 1
self.total_predictions.value = self.total_predictions.value + 1
# if False:
notification_time_start = time.perf_counter()
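The increments above are plain read-modify-write operations on the shared Values, which is safe while only the servicer process writes them and the background process merely reads. If a second writer were ever added, the lock that each synchronized Value already carries can make the increment atomic. A small sketch with illustrative names:

from multiprocessing import Value

false_positives = Value('f', 0)  # synchronized by default, wraps an RLock

def record_false_positive(counter):
    # Hold the Value's built-in lock so the read-modify-write cannot interleave
    with counter.get_lock():
        counter.value += 1

record_false_positive(false_positives)
print(false_positives.value)  # 1.0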
......@@ -604,64 +618,66 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
self.correct_predictions += 1
else:
LOGGER.debug("False negative: {}".format(connection_info))
self.false_negatives += 1
self.false_negatives.value = self.false_negatives.value + 1
self.total_predictions += 1
self.total_predictions.value = self.total_predictions.value + 1
return Empty(message="Ok, information received (no attack detected)")
def analyze_prediction_accuracy(self, confidence):
def analyze_prediction_accuracy(self):
LOGGER.info("Number of Attack Connections Correctly Classified: {}".format(self.correct_attack_conns))
LOGGER.info("Number of Attack Connections: {}".format(len(self.attack_connections)))
if self.total_predictions > 0:
overall_detection_acc = self.correct_predictions / self.total_predictions
if self.total_predictions.value > 0:
self.overall_detection_acc.value = self.correct_predictions / self.total_predictions.value
else:
overall_detection_acc = 0
self.overall_detection_acc.value = 0
LOGGER.info("Overall Detection Accuracy: {}\n".format(overall_detection_acc))
LOGGER.info("Overall Detection Accuracy: {}\n".format(self.overall_detection_acc.value))
if len(self.attack_connections) > 0:
cryptomining_attack_detection_acc = self.correct_attack_conns / len(self.attack_connections)
self.cryptomining_attack_detection_acc.value = self.correct_attack_conns / len(self.attack_connections)
else:
cryptomining_attack_detection_acc = 0
self.cryptomining_attack_detection_acc.value = 0
LOGGER.info("Cryptomining Attack Detection Accuracy: {}".format(cryptomining_attack_detection_acc))
LOGGER.info("Cryptomining Detector Confidence: {}".format(confidence))
LOGGER.info("Cryptomining Attack Detection Accuracy: {}".format(self.cryptomining_attack_detection_acc.value))
LOGGER.info("Cryptomining Detector Confidence: {}".format(self.confidence.value))
self.generate_accuracy_log(overall_detection_acc, cryptomining_attack_detection_acc, confidence)
LOGGER.info("Time elapsed: {}".format(time.time() - TIME_START))
if (time.time() - TIME_START) >= (TIME_TO_STABILIZE * 60): # 10 minutes
self.generate_accuracy_scalability_csv(overall_detection_acc, cryptomining_attack_detection_acc, confidence)
def generate_accuracy_log(self, overall_detection_acc, cryptomining_attack_detection_acc, confidence):
self.attack_connections_len.value = len(self.attack_connections)
with open("prediction_accuracy.txt", "a") as f:
LOGGER.debug("Exporting prediction accuracy and confidence")
f.write("Overall Detection Accuracy: {}\n".format(overall_detection_acc))
f.write("Cryptomining Attack Detection Accuracy: {}\n".format(cryptomining_attack_detection_acc))
f.write("Total Predictions: {}\n".format(self.total_predictions))
f.write("Overall Detection Accuracy: {}\n".format(self.overall_detection_acc.value))
f.write("Cryptomining Attack Detection Accuracy: {}\n".format(self.cryptomining_attack_detection_acc.value))
f.write("Total Predictions: {}\n".format(self.total_predictions.value))
f.write("Total Positives: {}\n".format(len(self.attack_connections)))
f.write("False Positives: {}\n".format(self.false_positives))
f.write("True Negatives: {}\n".format(self.total_predictions - len(self.attack_connections)))
f.write("False Negatives: {}\n".format(self.false_negatives))
f.write("Cryptomining Detector Confidence: {}\n\n".format(confidence))
f.write("False Positives: {}\n".format(self.false_positives.value))
f.write("True Negatives: {}\n".format(self.total_predictions.value - len(self.attack_connections)))
f.write("False Negatives: {}\n".format(self.false_negatives.value))
f.write("Cryptomining Detector Confidence: {}\n\n".format(self.confidence.value))
f.write("Timestamp: {}\n".format(datetime.now().strftime("%d/%m/%Y %H:%M:%S")))
f.close()
def generate_accuracy_scalability_csv(self, overall_detection_acc, cryptomining_attack_detection_acc, confidence):
LOGGER.info("Generating scalability accuracy csv")
def generate_accuracy_scalability_csv(self):
LOGGER.debug("Starting async prediction accuracy analysis 2")
LOGGER.debug("Correct csv load: {}".format(os.path.exists("/var/teraflow/scalability_accuracy.csv")))
with open("scalability_accuracy.csv", 'a', newline='') as f:
# Wait for the system to stabilize
time.sleep(TIME_TO_STABILIZE * 60)
LOGGER.debug("Scalability csv started")
with open("/var/teraflow/scalability_accuracy.csv", 'a', newline='') as f:
spamwriter = csv.writer(f, delimiter=' ', quotechar='|', quoting=csv.QUOTE_MINIMAL)
spamwriter.writerow(["TIME_CONS, OVERALL_ACCURACY, CRYPTO_ACCURACY,\
TOTAL_PREDICTIONS, TOTAL_POSITIVES, F_POSITIVES,\
T_NEGATIVES, F_NEGATIVES, CONFIDENCE, TIMESTAMP, TIME_TO_STABILIZE"])
'''spamwriter.writerow(['TIME_CONS', 'OVERALL_ACCURACY', 'CRYPTO_ACCURACY',
'TOTAL_PREDICTIONS', 'TOTAL_POSITIVES', 'F_POSITIVES',
'T_NEGATIVES', 'F_NEGATIVES', 'CONFIDENCE', 'TIMESTAMP', 'TIME_TO_STABILIZE'])'''
spamwriter.writerow([MAX_CONNECTION_TIME] + [overall_detection_acc] + [cryptomining_attack_detection_acc]\
+ [self.total_predictions] + [len(self.attack_connections)] + [self.false_positives]\
+ [self.total_predictions - len(self.attack_connections)] + [self.false_negatives]\
+ [confidence] + [datetime.now().strftime("%d/%m/%Y %H:%M:%S")] + [TIME_TO_STABILIZE])
spamwriter.writerow([MAX_CONNECTION_TIME, self.overall_detection_acc.value, self.cryptomining_attack_detection_acc.value,
self.total_predictions.value, self.attack_connections_len.value, self.false_positives.value,
self.total_predictions.value - self.attack_connections_len.value, self.false_negatives.value,
self.confidence.value, datetime.now().strftime("%d/%m/%Y %H:%M:%S"), TIME_TO_STABILIZE])
f.close()
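The writer above uses a space delimiter with '|' as the quote character and csv.QUOTE_MINIMAL, which is why the timestamp field appears as |18/04/2023 09:11:32| in the sample scalability_accuracy.csv row earlier in this diff. A short sketch of reading the file back with matching csv.reader settings (the printing loop is illustrative):

import csv

with open("scalability_accuracy.csv", newline='') as f:
    reader = csv.reader(f, delimiter=' ', quotechar='|')
    for row in reader:
        # Each row comes back as a list of strings; the quoted timestamp keeps its internal space
        print(row)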
......@@ -670,6 +686,7 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
for metric in request.metrics:
self.AnalyzeConnectionStatistics(metric, context)
batch_time_end = time.time()
with open("batch_time.txt", "a") as f:
......