diff --git a/extract_cad_csv_periodically.sh b/extract_cad_csv_periodically.sh
new file mode 100644
index 0000000000000000000000000000000000000000..bcdf4dbf6fd7ff9605f91f8b2925a492296b2ec7
--- /dev/null
+++ b/extract_cad_csv_periodically.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+# Periodically collect the per-replica CAD metrics file (cad_metrics.csv) from every
+# l3-centralized pod in the "tfs" namespace into a local results folder.
+
+folder_name="cad_exp_1_results"
+
+if [ -d "$folder_name" ]; then
+    echo "Folder '$folder_name' already exists. Emptying it..."
+    rm -r "$folder_name"/*
+else
+    echo "Creating folder '$folder_name'..."
+    mkdir "$folder_name"
+fi
+
+while true; do
+    list=($(kubectl get pods --namespace tfs | grep l3-centralized | awk '{print $1}'))
+    # kubectl -n "tfs" cp $pod_name:exp_1.csv $folder_name/$pod_name.csv -c server
+    echo "These are the current CAD pods:"
+    for item in "${list[@]}"; do
+        echo "$item"
+        kubectl -n "tfs" cp "$item:cad_metrics.csv" "$folder_name/$item.csv" -c server
+    done
+    sleep 2
+done
+
+# kubectl get pods --namespace tfs | grep l3-centralized | wc -l
+# kubectl --namespace tfs get all | grep autoscaling/l3-centralizedattackdetectorservice-hpa | awk '{print $3}'
\ No newline at end of file
diff --git a/proto/copy_to_dad.sh b/proto/copy_to_dad.sh
index a866488bca256187b72bce47384b04a651a22c69..e8bf24e81aa2776ff868b84ebf2c0f43a00b113a 100755
--- a/proto/copy_to_dad.sh
+++ b/proto/copy_to_dad.sh
@@ -1,12 +1,17 @@
 #!/bin/bash
 
 # Set the variables for the remote host and destination directory
-REMOTE_HOST="192.168.165.73"
+REMOTE_HOST="192.168.165.11"
 DEST_DIR="/home/ubuntu/TeraflowDockerDistributed/l3_distributedattackdetector/proto"
 
 # Copy the files to the remote host
+echo "Copying proto files to the remote host ($REMOTE_HOST)"
 sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_centralizedattackdetector_pb2.py "$REMOTE_HOST:$DEST_DIR"
 sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_centralizedattackdetector_pb2_grpc.py "$REMOTE_HOST:$DEST_DIR"
+echo "CAD proto files copied"
 
 sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_attackmitigator_pb2.py "$REMOTE_HOST:$DEST_DIR"
-sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_attackmitigator_pb2_grpc.py "$REMOTE_HOST:$DEST_DIR"
\ No newline at end of file
+sshpass -p "ubuntu" scp /home/ubuntu/tfs-ctrl-new/proto/src/python/l3_attackmitigator_pb2_grpc.py "$REMOTE_HOST:$DEST_DIR"
+echo "AM proto files copied"
+
+echo "Proto files copied to the remote host ($REMOTE_HOST) successfully"
\ No newline at end of file
diff --git a/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
index e65eb5a84b73c768ea25831406ffc8e76e7d8cec..6be956358d4e7cc192a5106b5c1ebffe8db8e7bd 100644
--- a/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
+++ b/src/l3_centralizedattackdetector/service/l3_centralizedattackdetectorServiceServicerImpl.py
@@ -42,6 +42,8 @@
 import uuid
 
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 
+import csv
+
 LOGGER = logging.getLogger(__name__)
 current_dir = os.path.dirname(os.path.abspath(__file__))
@@ -192,7 +194,18 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
 
         self.replica_uuid = uuid.uuid4()
 
+        self.first_batch_request_time = 0
+        self.last_batch_request_time = 0
+
         LOGGER.info("This replica's identifier is: " + str(self.replica_uuid))
+
+        csv_file_path = 'cad_metrics.csv'
+
+        col_names = ['timestamp_first_req', 'timestamp_last_req', 'total_time', 'batch_size']
+
+        with open(csv_file_path, 'w', newline='') as file:
+            writer = csv.writer(file)
+            writer.writerow(col_names)
 
         """
         Create a monitored KPI for a specific service and add it to the Monitoring Client
@@ -561,10 +574,13 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def AnalyzeConnectionStatistics(self, request, context):
         # Perform inference with the data sent in the request
+        if len(self.active_requests) == 0:
+            self.first_batch_request_time = time.perf_counter()
+
         self.active_requests.append(request)
 
         if len(self.active_requests) == BATCH_SIZE:
-            logging.info("Performing inference...")
+            logging.debug("Performing inference... {}".format(self.replica_uuid))
 
             inference_time_start = time.time()
             cryptomining_detector_output = self.perform_distributed_inference(self.active_requests)
@@ -711,6 +727,18 @@ class l3_centralizedattackdetectorServiceServicerImpl(L3Centralizedattackdetecto
 
             # return Empty(message="Ok, information received (no attack detected)")
 
             self.active_requests = []
+
+            csv_file_path = 'cad_metrics.csv'
+            self.last_batch_request_time = time.perf_counter()
+
+            col_values = [self.first_batch_request_time, self.last_batch_request_time,
+                          self.last_batch_request_time - self.first_batch_request_time, BATCH_SIZE]
+
+            with open(csv_file_path, 'a', newline='') as file:
+                writer = csv.writer(file)
+                writer.writerow(col_values)
+
+            return Empty(message="Ok, metrics processed")
 
         return Empty(message="Ok, information received")
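
Note on the metrics produced by this change: each CAD replica appends one row per processed batch to cad_metrics.csv (the perf_counter timestamps of the first and last request in the batch, the elapsed time between them, and BATCH_SIZE), and extract_cad_csv_periodically.sh copies that file out of every l3-centralized pod into cad_exp_1_results/<pod_name>.csv. Since the timestamps come from time.perf_counter(), they are only meaningful as differences, not as wall-clock times. The snippet below is a minimal, hypothetical post-processing sketch and is not part of this patch; the file name summarize_cad_metrics.py is illustrative, and it assumes the collected CSVs contain the header row written in __init__ above (standard library only, no extra dependencies):

# summarize_cad_metrics.py -- hypothetical helper, not included in the patch above.
# Summarizes the per-replica batch metrics collected into cad_exp_1_results/.
import csv
import glob
import os
import statistics

RESULTS_DIR = "cad_exp_1_results"  # folder created by extract_cad_csv_periodically.sh

for path in sorted(glob.glob(os.path.join(RESULTS_DIR, "*.csv"))):
    batch_times = []   # 'total_time' column: seconds spent accumulating/processing one batch
    batch_sizes = []   # 'batch_size' column: number of requests in that batch
    with open(path, newline='') as f:
        for row in csv.DictReader(f):
            batch_times.append(float(row['total_time']))
            batch_sizes.append(int(row['batch_size']))
    if not batch_times:
        continue
    total_time = sum(batch_times)
    throughput = sum(batch_sizes) / total_time if total_time > 0 else 0.0
    print(f"{os.path.basename(path)}: {len(batch_times)} batches, "
          f"mean batch time {statistics.mean(batch_times):.4f}s, ~{throughput:.1f} req/s")

Run it from the directory where the collection script was launched (e.g. python3 summarize_cad_metrics.py) after the collector loop has been overwriting the per-pod copies for the duration of the experiment; since kubectl cp fetches the whole file each iteration, the latest copy contains all rows recorded so far.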