Skip to content
Snippets Groups Projects
Commit 4baade3d authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

Merge branch 'feat/tool-perf-plots-generator' into 'develop'

Tools - Perf Plots Generator

See merge request !102
parents 05fbb8f9 fae3b6a7
No related branches found
No related tags found
2 merge requests!142Release TeraFlowSDN 2.1,!102Tools - Perf Plots Generator
Showing
with 859 additions and 0 deletions
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json, random, uuid
from typing import Dict, Tuple
from compute.service.rest_server.nbi_plugins.ietf_network_slice.bindings.network_slice_services import (
NetworkSliceServices
)
# Static inventory of the emulated test topology (UUIDs as known by the controller).
# R1 emulated devices
# Port 13-0 is Optical
# Port 13-1 is Copper
R1_UUID = "ed2388eb-5fb9-5888-a4f4-160267d3e19b"
R1_PORT_13_0_UUID_OPTICAL = "20440915-1a6c-5e7b-a80f-b0e0e51f066d"
R1_PORT_13_1_UUID_COPPER = "ff900d5d-2ac0-576c-9628-a2d016681f9d"
# R2 emulated devices
# Port 13-0 is Optical
# Port 13-1 is Copper
R2_UUID = "49ce0312-1274-523b-97b8-24d0eca2d72d"
R2_PORT_13_0_UUID_OPTICAL = "214618cb-b63b-5e66-84c2-45c1c016e5f0"
R2_PORT_13_1_UUID_COPPER = "4e0f7fb4-5d22-56ad-a00e-20bffb4860f9"
# R3 emulated devices
# Port 13-0 is Optical
# Port 13-1 is Copper
R3_UUID = "3bc8e994-a3b9-5f60-9c77-6608b1d08313"
R3_PORT_13_0_UUID_OPTICAL = "da5196f5-d651-5def-ada6-50ed6430279d"
R3_PORT_13_1_UUID_COPPER = "43d221fa-5701-5740-a129-502131f5bda2"
# R4 emulated devices
# Port 13-0 is Optical
# Port 13-1 is Copper
R4_UUID = "b43e6361-2573-509d-9a88-1793e751b10d"
R4_PORT_13_0_UUID_OPTICAL = "241b74a7-8677-595c-ad65-cc9093c1e341"
R4_PORT_13_1_UUID_COPPER = "c57abf46-caaf-5954-90cc-1fec0a69330e"
# Maps each copper endpoint UUID to the UUID of the router that owns it.
node_dict = {R1_PORT_13_1_UUID_COPPER: R1_UUID,
             R2_PORT_13_1_UUID_COPPER: R2_UUID,
             R3_PORT_13_1_UUID_COPPER: R3_UUID,
             R4_PORT_13_1_UUID_COPPER: R4_UUID}
# Candidate endpoints from which generate_request() randomly picks the two SDPs.
list_endpoints = [R1_PORT_13_1_UUID_COPPER,
                  R2_PORT_13_1_UUID_COPPER,
                  R3_PORT_13_1_UUID_COPPER,
                  R4_PORT_13_1_UUID_COPPER]
# Candidate SLO values: availability in percent, bandwidth in Gbps.
list_availability= [99, 99.9, 99.99, 99.999, 99.9999]
list_bw = [10, 40, 50, 100, 150, 200, 400]
# Candidate owners used to tag the generated slice services.
list_owner = ["Telefonica", "CTTC", "Telenor", "ADVA", "Ubitech", "ATOS"]
# NBI RESTCONF paths for creating / deleting network slice services.
URL_POST = "/restconf/data/ietf-network-slice-service:ietf-nss/network-slice-services"
URL_DELETE = "/restconf/data/ietf-network-slice-service:ietf-nss/network-slice-services/slice-service="
def generate_request(seed: str) -> Tuple[Dict, str]:
    """Build a random IETF network-slice-service creation request.

    The slice UUID is derived deterministically from *seed* (uuid5 over the DNS
    namespace); the endpoint pair, bandwidth, availability and owner are chosen
    at random from the module-level candidate lists on every call.

    Returns a tuple ``(json_request, slice_uuid)`` where ``json_request`` is the
    payload to POST to URL_POST and ``slice_uuid`` identifies the slice for a
    later DELETE.
    """
    ns = NetworkSliceServices()
    # Slice 1
    suuid = str(uuid.uuid5(uuid.NAMESPACE_DNS, str(seed)))
    slice1 = ns.slice_service[suuid]
    slice1.service_description = "Test slice for OFC 2023 demo"
    slice1.status().admin_status().status = "Planned" # TODO not yet mapped
    # SDPs: pick two distinct copper endpoints at random (retry on collision)
    sdps1 = slice1.sdps().sdp
    while True:
        ep1_uuid = random.choice(list_endpoints)
        ep2_uuid = random.choice(list_endpoints)
        if ep1_uuid != ep2_uuid:
            break
    sdps1[ep1_uuid].node_id = node_dict.get(ep1_uuid)
    sdps1[ep2_uuid].node_id = node_dict.get(ep2_uuid)
    # Connectivity group: Connection construct and 2 sla constrains:
    # - Bandwidth
    # - Availability
    cg_uuid = str(uuid.uuid4())
    cg = slice1.connection_groups().connection_group
    cg1 = cg[cg_uuid]
    cc1 = cg1.connectivity_construct[0]
    cc1.cc_id = 5
    p2p = cc1.connectivity_construct_type.p2p()
    p2p.p2p_sender_sdp = ep1_uuid
    p2p.p2p_receiver_sdp = ep2_uuid
    slo_custom = cc1.slo_sle_policy.custom()
    metric_bounds = slo_custom.service_slo_sle_policy().metric_bounds().metric_bound
    # SLO Bandwidth
    slo_bandwidth = metric_bounds["service-slo-two-way-bandwidth"]
    slo_bandwidth.value_description = "Guaranteed bandwidth"
    slo_bandwidth.bound = int(random.choice(list_bw))
    slo_bandwidth.metric_unit = "Gbps"
    # SLO Availability
    slo_availability = metric_bounds["service-slo-availability"]
    slo_availability.value_description = "Guaranteed availability"
    slo_availability.metric_unit = "percentage"
    slo_availability.bound = random.choice(list_availability)
    json_request = {"data": ns.to_json()}
    # Last, add name and owner manually (the bindings do not expose service-tags)
    list_name_owner = [{"tag-type": "owner", "value": random.choice(list_owner)}]
    json_request["data"]["ietf-network-slice-service:network-slice-services"]["slice-service"][0]["service-tags"] = list_name_owner
    return (json_request, suuid)
if __name__ == "__main__":
    # Smoke test: build one request with a fixed seed and pretty-print it.
    json_request, _slice_uuid = generate_request(123)
    print(json.dumps(json_request, sort_keys=True, indent=4))
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime, re
from typing import Dict, List, Optional, Tuple
from .tools.FileSystem import create_folders
from .tools.HistogramData import HistogramData
from .tools.Plotter import plot_histogram
from .tools.Prometheus import get_prometheus_range, get_prometheus_series_names
from .tools.Histogram import results_to_histograms, save_histograms, unaccumulate_histograms
##### EXPERIMENT SETTINGS ##############################################################################################
EXPERIMENT_NAME = 'L2VPN with Emulated'
EXPERIMENT_ID = 'l2vpn-emu'
# Prometheus query window (timezone-aware UTC) and sampling resolution.
TIME_START = datetime.datetime(2023, 5, 4, 6, 45, 0, 0, tzinfo=datetime.timezone.utc)
TIME_END = datetime.datetime(2023, 5, 4, 10, 15, 0, 0, tzinfo=datetime.timezone.utc)
TIME_STEP = '1m'
# Extra label filters to narrow the query; empty means "all series".
LABEL_FILTERS = {}
##### ENVIRONMENT SETTINGS #############################################################################################
PROM_ADDRESS = '127.0.0.1'
PROM_PORT = 9090
OUT_FOLDER = 'data/perf/'
##### PLOT-SPECIFIC CUSTOMIZATIONS #####################################################################################
EXPERIMENT_ID += '/component-rpcs'
# Series name pattern for component RPC duration histograms; the capture groups
# extract (component, rpc_method) from each matched series name.
SERIES_MATCH = 'tfs_.+_rpc_.+_histogram_duration_bucket'
RE_SERIES_NAME = re.compile(r'^tfs_(.+)_rpc_(.+)_histogram_duration_bucket$')
SERIES_LABELS = []
# Per component: substring of the RPC method name -> subsystem it belongs to.
SUBSYSTEMS_MAPPING = {
    'context': {
        'context'   : 'context',
        'topolog'   : 'topology',
        'device'    : 'device',
        'endpoint'  : 'device',
        'link'      : 'link',
        'service'   : 'service',
        'slice'     : 'slice',
        'policyrule': 'policyrule',
        'connection': 'connection',
    }
}
def get_subsystem(component : str, rpc_method : str) -> Optional[str]:
    """Return the first subsystem whose pattern appears in rpc_method, or None."""
    for pattern, subsystem in SUBSYSTEMS_MAPPING.get(component, {}).items():
        if pattern in rpc_method:
            return subsystem
    return None
def update_keys(component : str, rpc_method : str) -> Tuple[Tuple, Tuple]:
    """Split series labels into a (component, subsystem) collection key and an
    (rpc_method,) histogram key."""
    # one plot per (component, subsystem); each RPC method becomes one series
    return (component, get_subsystem(component, rpc_method)), (rpc_method,)
def get_plot_specs(folders : Dict[str, str], component : str, subsystem : Optional[str]) -> Tuple[str, str]:
    """Build the plot title and output PNG path for a component, optionally
    narrowed down to one of its subsystems."""
    title_parts = [component.title(), 'RPC Methods']
    file_parts = [component]
    if subsystem is not None:
        title_parts.append(subsystem.title())
        file_parts.append(subsystem)
    title = '{:s} [{:s}]'.format(' - '.join(title_parts), EXPERIMENT_NAME)
    filepath = '{:s}/{:s}.png'.format(folders['png'], '-'.join(file_parts))
    return title, filepath
##### AUTOMATED CODE ###################################################################################################
def get_series_names(folders : Dict[str, str]) -> List[str]:
    """Ask Prometheus for all series names matching SERIES_MATCH inside the
    experiment time window; the raw response is archived as JSON."""
    raw_filepath = '{:s}/_series.json'.format(folders['json'])
    return get_prometheus_series_names(
        PROM_ADDRESS, PROM_PORT, SERIES_MATCH, TIME_START, TIME_END,
        raw_json_filepath=raw_filepath)
def get_histogram_data(series_name : str, folders : Dict[str, str]) -> Dict[Tuple, HistogramData]:
    """Fetch the Prometheus range for series_name, convert it into per-label
    histograms, de-accumulate the bucket counts, and dump them as CSV files."""
    match = RE_SERIES_NAME.match(series_name)
    if match is None:
        # pylint: disable=broad-exception-raised
        raise Exception('Unparsable series name: {:s}'.format(str(series_name)))
    raw_filepath = '{:s}/_raw_{:s}.json'.format(folders['json'], series_name)
    results = get_prometheus_range(
        PROM_ADDRESS, PROM_PORT, series_name, LABEL_FILTERS, TIME_START, TIME_END, TIME_STEP,
        raw_json_filepath=raw_filepath)
    histograms = results_to_histograms(results, SERIES_LABELS, extra_labels=match.groups())
    # buckets are cumulative over 'le' only; keep the per-timestamp totals
    unaccumulate_histograms(histograms, process_bins=True, process_timestamps=False)
    save_histograms(histograms, folders['csv'])
    return histograms
def main() -> None:
    """Gather matching series from Prometheus, bucket them into collections,
    and emit one cumulative-histogram plot per collection."""
    histograms_collection : Dict[Tuple, Dict[Tuple, HistogramData]] = dict()
    folders = create_folders(OUT_FOLDER, EXPERIMENT_ID)
    series_names = get_series_names(folders)
    for series_name in series_names:
        histograms = get_histogram_data(series_name, folders)
        for raw_keys, histogram_data in histograms.items():
            # Fix: the original rebound 'histograms' and 'histogram_keys' while
            # iterating over them; distinct names keep iteration target and
            # destination collection apart.
            collection_keys, histogram_keys = update_keys(*raw_keys)
            collection = histograms_collection.setdefault(collection_keys, dict())
            collection[histogram_keys] = histogram_data
    for collection_keys, collection in histograms_collection.items():
        title, filepath = get_plot_specs(folders, *collection_keys)
        plot_histogram(collection, filepath, title=title)
if __name__ == '__main__':
    main()
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime, re
from typing import Dict, List, Optional, Tuple
from .tools.FileSystem import create_folders
from .tools.HistogramData import HistogramData
from .tools.Plotter import plot_histogram
from .tools.Prometheus import get_prometheus_range, get_prometheus_series_names
from .tools.Histogram import results_to_histograms, save_histograms, unaccumulate_histograms
##### EXPERIMENT SETTINGS ##############################################################################################
EXPERIMENT_NAME = 'L2VPN with Emulated'
EXPERIMENT_ID = 'l2vpn-emu'
# Prometheus query window (timezone-aware UTC) and sampling resolution.
TIME_START = datetime.datetime(2023, 5, 4, 6, 45, 0, 0, tzinfo=datetime.timezone.utc)
TIME_END = datetime.datetime(2023, 5, 4, 10, 15, 0, 0, tzinfo=datetime.timezone.utc)
TIME_STEP = '1m'
# Uncomment entries to narrow the query to specific label values.
LABEL_FILTERS = {
    #'driver': 'emulated',
    #'operation': 'configure_device', # add_device / configure_device
    #'step': 'get_device',
}
##### ENVIRONMENT SETTINGS #############################################################################################
PROM_ADDRESS = '127.0.0.1'
PROM_PORT = 9090
OUT_FOLDER = 'data/perf/'
##### PLOT-SPECIFIC CUSTOMIZATIONS #####################################################################################
EXPERIMENT_ID += '/dev-drv-details'
# Single series carrying device-driver execution details; labels (not the
# series name) distinguish driver, operation and step.
SERIES_MATCH = 'tfs_device_execution_details_histogram_duration_bucket'
RE_SERIES_NAME = re.compile(r'^tfs_device_execution_details_histogram_duration_bucket$')
SERIES_LABELS = ['driver', 'operation', 'step']
def update_keys(driver : str, operation : str, step : str) -> Tuple[Tuple, Tuple]:
    """Split series labels into a (driver, operation) collection key and a
    (step,) histogram key."""
    # one plot per (driver, operation); each step becomes one series in it
    return (driver, operation), (step,)
def get_plot_specs(folders : Dict[str, str], driver : str, operation : str) -> Tuple[str, str]:
    """Build the plot title and output PNG path for a (driver, operation) pair.

    Fix: the original used operation.replace('_', '') which collapses
    'configure_device' into 'Configuredevice'; underscores are now replaced
    with spaces before title-casing.
    """
    title = 'Device Driver - {:s} - {:s}'.format(driver.title(), operation.replace('_', ' ').title())
    filepath = '{:s}/{:s}-{:s}.png'.format(folders['png'], driver, operation)
    return title, filepath
##### AUTOMATED CODE ###################################################################################################
def get_series_names(folders : Dict[str, str]) -> List[str]:
    """Ask Prometheus for all series names matching SERIES_MATCH inside the
    experiment time window; the raw response is archived as JSON."""
    raw_filepath = '{:s}/_series.json'.format(folders['json'])
    return get_prometheus_series_names(
        PROM_ADDRESS, PROM_PORT, SERIES_MATCH, TIME_START, TIME_END,
        raw_json_filepath=raw_filepath)
def get_histogram_data(series_name : str, folders : Dict[str, str]) -> Dict[Tuple, HistogramData]:
    """Fetch the Prometheus range for series_name, convert it into per-label
    histograms, de-accumulate the bucket counts, and dump them as CSV files."""
    match = RE_SERIES_NAME.match(series_name)
    if match is None:
        # pylint: disable=broad-exception-raised
        raise Exception('Unparsable series name: {:s}'.format(str(series_name)))
    raw_filepath = '{:s}/_raw_{:s}.json'.format(folders['json'], series_name)
    results = get_prometheus_range(
        PROM_ADDRESS, PROM_PORT, series_name, LABEL_FILTERS, TIME_START, TIME_END, TIME_STEP,
        raw_json_filepath=raw_filepath)
    histograms = results_to_histograms(results, SERIES_LABELS, extra_labels=match.groups())
    # buckets are cumulative over 'le' only; keep the per-timestamp totals
    unaccumulate_histograms(histograms, process_bins=True, process_timestamps=False)
    save_histograms(histograms, folders['csv'])
    return histograms
def main() -> None:
    """Gather matching series from Prometheus, bucket them into collections,
    and emit one cumulative-histogram plot per collection."""
    histograms_collection : Dict[Tuple, Dict[Tuple, HistogramData]] = dict()
    folders = create_folders(OUT_FOLDER, EXPERIMENT_ID)
    series_names = get_series_names(folders)
    for series_name in series_names:
        histograms = get_histogram_data(series_name, folders)
        for raw_keys, histogram_data in histograms.items():
            # Fix: the original rebound 'histograms' and 'histogram_keys' while
            # iterating over them; distinct names keep iteration target and
            # destination collection apart.
            collection_keys, histogram_keys = update_keys(*raw_keys)
            collection = histograms_collection.setdefault(collection_keys, dict())
            collection[histogram_keys] = histogram_data
    for collection_keys, collection in histograms_collection.items():
        title, filepath = get_plot_specs(folders, *collection_keys)
        plot_histogram(collection, filepath, title=title)
if __name__ == '__main__':
    main()
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime, re
from typing import Dict, List, Tuple
from .tools.FileSystem import create_folders
from .tools.HistogramData import HistogramData
from .tools.Plotter import plot_histogram
from .tools.Prometheus import get_prometheus_range, get_prometheus_series_names
from .tools.Histogram import results_to_histograms, save_histograms, unaccumulate_histograms
##### EXPERIMENT SETTINGS ##############################################################################################
EXPERIMENT_NAME = 'L2VPN with Emulated'
EXPERIMENT_ID = 'l2vpn-emu'
# Prometheus query window (timezone-aware UTC) and sampling resolution.
TIME_START = datetime.datetime(2023, 5, 4, 6, 45, 0, 0, tzinfo=datetime.timezone.utc)
TIME_END = datetime.datetime(2023, 5, 4, 10, 15, 0, 0, tzinfo=datetime.timezone.utc)
TIME_STEP = '1m'
# Uncomment entries to narrow the query to specific label values.
LABEL_FILTERS = {
    #'driver': 'emulated',
}
##### ENVIRONMENT SETTINGS #############################################################################################
PROM_ADDRESS = '127.0.0.1'
PROM_PORT = 9090
OUT_FOLDER = 'data/perf/'
##### PLOT-SPECIFIC CUSTOMIZATIONS #####################################################################################
EXPERIMENT_ID += '/dev-drv-methods'
# Series name pattern for device driver method duration histograms; the capture
# group extracts the method name from each matched series name.
SERIES_MATCH = 'tfs_device_driver_.+_histogram_duration_bucket'
RE_SERIES_NAME = re.compile(r'^tfs_device_driver_(.+)_histogram_duration_bucket$')
SERIES_LABELS = ['driver']
def update_keys(driver : str, method : str) -> Tuple[Tuple, Tuple]:
    """Split series labels into a (driver,) collection key and a (method,)
    histogram key."""
    # one plot per driver; each driver method becomes one series in it
    return (driver,), (method,)
def get_plot_specs(folders : Dict[str, str], driver : str) -> Tuple[str, str]:
    """Build the plot title and output PNG path for a device driver."""
    pretty_driver = driver.title()
    return (
        'Device Driver - {:s}'.format(pretty_driver),
        '{:s}/{:s}.png'.format(folders['png'], driver),
    )
##### AUTOMATED CODE ###################################################################################################
def get_series_names(folders : Dict[str, str]) -> List[str]:
    """Ask Prometheus for all series names matching SERIES_MATCH inside the
    experiment time window; the raw response is archived as JSON."""
    raw_filepath = '{:s}/_series.json'.format(folders['json'])
    return get_prometheus_series_names(
        PROM_ADDRESS, PROM_PORT, SERIES_MATCH, TIME_START, TIME_END,
        raw_json_filepath=raw_filepath)
def get_histogram_data(series_name : str, folders : Dict[str, str]) -> Dict[Tuple, HistogramData]:
    """Fetch the Prometheus range for series_name, convert it into per-label
    histograms, de-accumulate the bucket counts, and dump them as CSV files."""
    match = RE_SERIES_NAME.match(series_name)
    if match is None:
        # pylint: disable=broad-exception-raised
        raise Exception('Unparsable series name: {:s}'.format(str(series_name)))
    raw_filepath = '{:s}/_raw_{:s}.json'.format(folders['json'], series_name)
    results = get_prometheus_range(
        PROM_ADDRESS, PROM_PORT, series_name, LABEL_FILTERS, TIME_START, TIME_END, TIME_STEP,
        raw_json_filepath=raw_filepath)
    histograms = results_to_histograms(results, SERIES_LABELS, extra_labels=match.groups())
    # buckets are cumulative over 'le' only; keep the per-timestamp totals
    unaccumulate_histograms(histograms, process_bins=True, process_timestamps=False)
    save_histograms(histograms, folders['csv'])
    return histograms
def main() -> None:
    """Gather matching series from Prometheus, bucket them into collections,
    and emit one cumulative-histogram plot per collection."""
    histograms_collection : Dict[Tuple, Dict[Tuple, HistogramData]] = dict()
    folders = create_folders(OUT_FOLDER, EXPERIMENT_ID)
    series_names = get_series_names(folders)
    for series_name in series_names:
        histograms = get_histogram_data(series_name, folders)
        for raw_keys, histogram_data in histograms.items():
            # Fix: the original rebound 'histograms' and 'histogram_keys' while
            # iterating over them; distinct names keep iteration target and
            # destination collection apart.
            collection_keys, histogram_keys = update_keys(*raw_keys)
            collection = histograms_collection.setdefault(collection_keys, dict())
            collection[histogram_keys] = histogram_data
    for collection_keys, collection in histograms_collection.items():
        title, filepath = get_plot_specs(folders, *collection_keys)
        plot_histogram(collection, filepath, title=title)
if __name__ == '__main__':
    main()
# Tool: Perf Plots Generator:
Simple tool to gather performance data from Prometheus and produce histogram plots.
## Example:
- Ensure your MicroK8s deployment includes the monitoring addon and that your deployment specifies the service monitors.
- Deploy TeraFlowSDN controller with your specific settings:
```(bash)
cd ~/tfs-ctrl
source my_deploy.sh
./deploy.sh
```
- Execute the test you want to measure.
- Select the appropriate script:
- Device_Driver_Methods : To report Device Driver Methods
- Device_Driver_Details : To report Device Add/Configure Details
- Service_Handler_Methods : To report Service Handler Methods
- Component_RPC_Methods : To report Component RPC Methods
- Tune the experiment settings
- Execute the report script:
```(bash)
PYTHONPATH=./src python -m tests.tools.perf_plots.<script>
```
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime, re
from typing import Dict, List, Tuple
from .tools.FileSystem import create_folders
from .tools.HistogramData import HistogramData
from .tools.Plotter import plot_histogram
from .tools.Prometheus import get_prometheus_range, get_prometheus_series_names
from .tools.Histogram import results_to_histograms, save_histograms, unaccumulate_histograms
##### EXPERIMENT SETTINGS ##############################################################################################
EXPERIMENT_NAME = 'L2VPN with Emulated'
EXPERIMENT_ID = 'l2vpn-emu'
# Prometheus query window (timezone-aware UTC) and sampling resolution.
TIME_START = datetime.datetime(2023, 5, 4, 6, 45, 0, 0, tzinfo=datetime.timezone.utc)
TIME_END = datetime.datetime(2023, 5, 4, 10, 15, 0, 0, tzinfo=datetime.timezone.utc)
TIME_STEP = '1m'
# Uncomment entries to narrow the query to specific label values.
LABEL_FILTERS = {
    #'handler': 'l2nm_emulated',
}
##### ENVIRONMENT SETTINGS #############################################################################################
PROM_ADDRESS = '127.0.0.1'
PROM_PORT = 9090
OUT_FOLDER = 'data/perf/'
##### PLOT-SPECIFIC CUSTOMIZATIONS #####################################################################################
EXPERIMENT_ID += '/svc-hdl-methods'
# Series name pattern for service handler method duration histograms; the
# capture group extracts the method name from each matched series name.
SERIES_MATCH = 'tfs_service_handler_.+_histogram_duration_bucket'
RE_SERIES_NAME = re.compile(r'^tfs_service_handler_(.+)_histogram_duration_bucket$')
SERIES_LABELS = ['handler']
def update_keys(handler : str, method : str) -> Tuple[Tuple, Tuple]:
    """Split series labels into a (handler,) collection key and a (method,)
    histogram key."""
    # one plot per handler; each handler method becomes one series in it
    return (handler,), (method,)
def get_plot_specs(folders : Dict[str, str], handler : str) -> Tuple[str, str]:
    """Build the plot title and output PNG path for a service handler."""
    pretty_handler = handler.title()
    return (
        'Service Handler - {:s}'.format(pretty_handler),
        '{:s}/{:s}.png'.format(folders['png'], handler),
    )
##### AUTOMATED CODE ###################################################################################################
def get_series_names(folders : Dict[str, str]) -> List[str]:
    """Ask Prometheus for all series names matching SERIES_MATCH inside the
    experiment time window; the raw response is archived as JSON."""
    raw_filepath = '{:s}/_series.json'.format(folders['json'])
    return get_prometheus_series_names(
        PROM_ADDRESS, PROM_PORT, SERIES_MATCH, TIME_START, TIME_END,
        raw_json_filepath=raw_filepath)
def get_histogram_data(series_name : str, folders : Dict[str, str]) -> Dict[Tuple, HistogramData]:
    """Fetch the Prometheus range for series_name, convert it into per-label
    histograms, de-accumulate the bucket counts, and dump them as CSV files."""
    match = RE_SERIES_NAME.match(series_name)
    if match is None:
        # pylint: disable=broad-exception-raised
        raise Exception('Unparsable series name: {:s}'.format(str(series_name)))
    raw_filepath = '{:s}/_raw_{:s}.json'.format(folders['json'], series_name)
    results = get_prometheus_range(
        PROM_ADDRESS, PROM_PORT, series_name, LABEL_FILTERS, TIME_START, TIME_END, TIME_STEP,
        raw_json_filepath=raw_filepath)
    histograms = results_to_histograms(results, SERIES_LABELS, extra_labels=match.groups())
    # buckets are cumulative over 'le' only; keep the per-timestamp totals
    unaccumulate_histograms(histograms, process_bins=True, process_timestamps=False)
    save_histograms(histograms, folders['csv'])
    return histograms
def main() -> None:
    """Gather matching series from Prometheus, bucket them into collections,
    and emit one cumulative-histogram plot per collection."""
    histograms_collection : Dict[Tuple, Dict[Tuple, HistogramData]] = dict()
    folders = create_folders(OUT_FOLDER, EXPERIMENT_ID)
    series_names = get_series_names(folders)
    for series_name in series_names:
        histograms = get_histogram_data(series_name, folders)
        for raw_keys, histogram_data in histograms.items():
            # Fix: the original rebound 'histograms' and 'histogram_keys' while
            # iterating over them; distinct names keep iteration target and
            # destination collection apart.
            collection_keys, histogram_keys = update_keys(*raw_keys)
            collection = histograms_collection.setdefault(collection_keys, dict())
            collection[histogram_keys] = histogram_data
    for collection_keys, collection in histograms_collection.items():
        title, filepath = get_plot_specs(folders, *collection_keys)
        plot_histogram(collection, filepath, title=title)
if __name__ == '__main__':
    main()
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from typing import Dict
def create_folders(root_folder : str, experiment_id : str) -> Dict[str, str]:
    """Create (if missing) the csv/json/png output folders for an experiment.

    Returns a mapping {'csv': path, 'json': path, 'png': path} of string paths
    rooted at <root_folder>/<experiment_id>.
    """
    experiment_folder = root_folder + '/' + experiment_id
    folders = {
        subfolder: experiment_folder + '/' + subfolder
        for subfolder in ('csv', 'json', 'png')
    }
    for folder_path in folders.values():
        pathlib.Path(folder_path).mkdir(parents=True, exist_ok=True)
    return folders
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import csv
from typing import Dict, List, Tuple
from .HistogramData import HistogramData
def results_to_histograms(
    results : List[Dict], key_labels : List[str], extra_labels : List[str] = ()
) -> Dict[Tuple, HistogramData]:
    """Group Prometheus query_range results into HistogramData objects.

    Each result is keyed by the values of its key_labels (plus any
    extra_labels), one bucket ('le' label) per result. Counts are stored as
    returned by Prometheus, i.e. still cumulative.

    Fixes: the default for extra_labels was a mutable list ([]); an immutable
    empty tuple avoids the shared-mutable-default pitfall. The get-then-
    setdefault double lookup was collapsed into a single setdefault.
    """
    histograms : Dict[Tuple, HistogramData] = dict()
    for result in results:
        metric : Dict = result['metric']
        labels = [metric[l] for l in key_labels]
        if len(extra_labels) > 0: labels.extend(extra_labels)
        histogram_key = tuple(labels)
        histogram = histograms.setdefault(
            histogram_key, HistogramData(timestamps=set(), bins=set(), data=dict()))
        bin_ = float(metric['le'])
        histogram.bins.add(bin_)
        values : List[Tuple[int, str]] = result['values']
        for timestamp,count in values:
            histogram.timestamps.add(timestamp)
            histogram.data.setdefault(timestamp, dict())[bin_] = int(count)
    return histograms
def unaccumulate_histogram(
    histogram : HistogramData, process_bins : bool = True, process_timestamps : bool = True
) -> None:
    """Convert cumulative bucket counts into per-bin and/or per-interval deltas, in place.

    Prometheus histogram buckets are cumulative over 'le' (process_bins) and
    counters are cumulative over time (process_timestamps).
    """
    ordered_timestamps = sorted(histogram.timestamps)
    ordered_bins = sorted(histogram.bins)
    # per-bin totals already attributed to earlier timestamps
    prev_totals_per_bin = {bin_: 0 for bin_ in ordered_bins}
    for timestamp in ordered_timestamps:
        bin_to_count = histogram.data.get(timestamp)
        if bin_to_count is None: continue
        running_bin_total = 0
        for bin_ in ordered_bins:
            count = bin_to_count[bin_]
            if process_bins:
                # subtract the smaller buckets already counted at this timestamp
                count -= running_bin_total
                running_bin_total += count
            if process_timestamps:
                # subtract what earlier timestamps already contributed to this bin
                count -= prev_totals_per_bin[bin_]
                prev_totals_per_bin[bin_] += count
            bin_to_count[bin_] = count
def unaccumulate_histograms(
    histograms : Dict[Tuple, HistogramData], process_bins : bool = True, process_timestamps : bool = True
) -> None:
    """Apply unaccumulate_histogram() to every histogram in the collection."""
    for histogram_data in histograms.values():
        unaccumulate_histogram(
            histogram_data, process_bins=process_bins, process_timestamps=process_timestamps)
def save_histogram(filepath : str, histogram : HistogramData) -> None:
    """Write one histogram as CSV: one row per timestamp, one column per bin.

    The first column holds the timestamp, hence the leading empty header cell.
    Missing (timestamp, bin) entries are written as 0.

    Fix: the file is opened with newline='' as required by the csv module;
    otherwise rows get doubled line terminators on Windows.
    """
    timestamps = sorted(histogram.timestamps)
    bins = sorted(histogram.bins)
    header = [''] + [str(b) for b in bins]
    with open(filepath, 'w', encoding='UTF-8', newline='') as f:
        writer = csv.writer(f)
        writer.writerow(header)
        for timestamp in timestamps:
            bin_to_count = histogram.data.get(timestamp, {})
            writer.writerow([timestamp] + [
                str(bin_to_count.get(bin_, 0))
                for bin_ in bins
            ])
def save_histograms(histograms : Dict[Tuple, HistogramData], data_folder : str) -> None:
    """Write every histogram to <data_folder>/<key1>__<key2>...csv."""
    for histogram_keys, histogram_data in histograms.items():
        filename = '__'.join(histogram_keys)
        save_histogram('{:s}/{:s}.csv'.format(data_folder, filename), histogram_data)
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from dataclasses import dataclass
from typing import Dict, Set
@dataclass
class HistogramData:
    """In-memory representation of one Prometheus histogram series."""
    # Sample timestamps present in the series.
    timestamps : Set[int]
    # Bucket upper bounds (values of the 'le' label).
    bins : Set[float]
    # timestamp -> (bucket upper bound -> count).
    data : Dict[int, Dict[float, int]]
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
import matplotlib.pyplot as plt
from typing import Dict, Optional, Tuple
from .HistogramData import HistogramData
def plot_histogram(
    histograms : Dict[Tuple, HistogramData], filepath : str,
    title : Optional[str] = None, label_separator : str = ' ', dpi : int = 600,
    legend_loc : str = 'best', grid : bool = True
) -> None:
    """Plot the cumulative distribution of each histogram (at its last
    timestamp) as a step CDF and save the figure to filepath.

    Series with no data at the last timestamp, or with all-zero counts, are
    skipped; if nothing remains, no file is produced.

    Fixes: missing data/buckets at the last timestamp no longer raise
    (TypeError/KeyError); the figure is closed after use so batch runs over
    many plots do not leak matplotlib figures.
    """
    # plot the cumulative histogram
    fig, ax = plt.subplots(figsize=(8, 8))
    num_series = 0
    for histogram_keys, histogram_data in histograms.items():
        bins = sorted(histogram_data.bins)
        last_timestamp = max(histogram_data.timestamps)
        bin_to_count = histogram_data.data.get(last_timestamp)
        if bin_to_count is None: continue      # no samples at the last timestamp
        counts = [int(bin_to_count.get(bin_, 0)) for bin_ in bins]
        if sum(counts) == 0: continue
        num_series += 1
        bins.insert(0, 0)
        bins = np.array(bins).astype(float)
        counts = np.array(counts).astype(float)
        assert len(bins) == len(counts) + 1
        centroids = (bins[1:] + bins[:-1]) / 2
        label = label_separator.join(histogram_keys)
        ax.hist(centroids, bins=bins, weights=counts, range=(min(bins), max(bins)), density=True,
                histtype='step', cumulative=True, label=label)
    if num_series == 0:
        plt.close(fig)  # nothing plotted; release the figure
        return
    ax.grid(grid)
    ax.legend(loc=legend_loc)
    if title is not None: ax.set_title(title)
    ax.set_xlabel('seconds')
    ax.set_ylabel('Likelihood of occurrence')
    plt.xscale('log')
    plt.savefig(filepath, dpi=(dpi))
    plt.show()
    plt.close(fig)  # free memory when generating many plots in one run
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json, requests, time
from datetime import datetime
from typing import Dict, List, Optional
def get_prometheus_series_names(
    address : str, port : int, metric_match : str, time_start : datetime, time_end : datetime, timeout : int = 10,
    raw_json_filepath : Optional[str] = None
) -> List[str]:
    """Return the Prometheus series names matching metric_match within the window.

    Queries /api/v1/label/__name__/values; optionally archives the raw JSON
    response at raw_json_filepath. Raises AssertionError when Prometheus
    reports a non-success status.

    Fix: time.mktime(dt.timetuple()) interprets the datetime as LOCAL time,
    silently shifting timezone-aware (UTC) datetimes by the local offset;
    datetime.timestamp() honors tzinfo.
    """
    str_url = 'http://{:s}:{:d}/api/v1/label/__name__/values'.format(address, port)
    params = {
        'match[]': '{{__name__=~"{:s}"}}'.format(metric_match),
        'start': time_start.timestamp(),
        'end'  : time_end.timestamp(),
    }
    response = requests.get(str_url, params=params, timeout=timeout)
    results = response.json()
    if raw_json_filepath is not None:
        with open(raw_json_filepath, 'w', encoding='UTF-8') as f:
            f.write(json.dumps(results, sort_keys=True))
    assert results['status'] == 'success'
    return results['data']
def get_prometheus_range(
    address : str, port : int, metric_name : str, labels : Dict[str, str], time_start : datetime, time_end : datetime,
    time_step : str, timeout : int = 10, raw_json_filepath : Optional[str] = None
) -> List[Dict]:
    """Run a Prometheus range query for metric_name (optionally label-filtered).

    Returns the 'result' entries of a matrix-typed response; optionally archives
    the raw JSON response at raw_json_filepath. Raises AssertionError when the
    status is not success or the result type is not 'matrix'.

    Fix: time.mktime(dt.timetuple()) interprets the datetime as LOCAL time,
    silently shifting timezone-aware (UTC) datetimes by the local offset;
    datetime.timestamp() honors tzinfo.
    """
    str_url = 'http://{:s}:{:d}/api/v1/query_range'.format(address, port)
    str_query = metric_name
    if len(labels) > 0:
        str_labels = ', '.join(['{:s}="{:s}"'.format(name, value) for name,value in labels.items()])
        str_query += '{{{:s}}}'.format(str_labels)
    params = {
        'query': str_query,
        'start': time_start.timestamp(),
        'end'  : time_end.timestamp(),
        'step' : time_step,
    }
    response = requests.get(str_url, params=params, timeout=timeout)
    results = response.json()
    if raw_json_filepath is not None:
        with open(raw_json_filepath, 'w', encoding='UTF-8') as f:
            f.write(json.dumps(results, sort_keys=True))
    assert results['status'] == 'success'
    assert results['data']['resultType'] == 'matrix'
    return results['data']['result']
# Copyright 2022-2023 ETSI TeraFlowSDN - TFS OSG (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment