Commit eb15386f authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

DLT component - Performance Assessment Tool:

- Moved tool to a different folder
- Improved selection of random operations
- Added missing metrics reporting
- Added Readme file
- Added launch script
- Added preliminary results
parent 3e9b528c
Loading
Loading
Loading
Loading
+1001 −0

File added.

Preview size limit exceeded, changes collapsed.

+42 −0
Original line number Diff line number Diff line
# DLT Gateway+Blockchain Performance Assessment Test

This test assesses the performance of the DLT component's Gateway + HyperLedger Fabric Blockchain.

To do that, it first creates a number of random devices, links, services, and slices.
Then, it performs some random creations, retrievals, updates, and deletes of random devices, links,
services, and slices.

For each operation and record type, the size of the entities in bytes, the number of endpoints,
constraints, config rules, subservices, and subslices is recorded.
In addition, the time taken to store/retrieve the records in the blockchain is also recorded,
as well as the delay between the change and the reception of the asynchronous notification event.

## Scenario preparation:
Create a docker virtual network:

```(bash)
docker network rm tfs-br
docker network create -d bridge --subnet=172.254.254.0/24 --gateway=172.254.254.1 --ip-range=172.254.254.0/24 tfs-br
```

Build the DLT Gateway component's Docker image:
```(bash)
docker build -t dlt-gateway:test -f ./src/dlt/gateway/Dockerfile .
```

Start the DLT Gateway component:
```(bash)
docker run --name dlt-gateway -d -p 50051:50051 --network=tfs-br dlt-gateway:test
```

Install possibly missing requirements:
```(bash)
pip install grpcio==1.47.0 grpcio-tools==1.47.0 protobuf==3.20.1
```

Start the performance assessment:
```(bash)
PYTHONPATH=./src python -m dlt.performance
```

The test produces a CSV file with the results per operation.
+45 −25
Original line number Diff line number Diff line
@@ -12,19 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# vary operation: create, update, delete
# vary record type (device, link, service, slice)
# for devices, services, slices, vary size: num endpoints, num constraints, num config rules
# measure load/store time
# measure event notification time

# docker build -t dlt-gateway:test -f ./src/dlt/gateway/Dockerfile .
# docker run --name dlt-gateway -d -p 50051:50051 --network=tfs-br dlt-gateway:test
# pip install grpcio==1.47.0 grpcio-tools==1.47.0 protobuf==3.20.1
# PYTHONPATH=./src python
# PYTHONPATH=./src python -m dlt.connector.tests.performance

import functools, logging, sys, time
import functools, logging, pathlib, sys, time
from common.proto.dlt_gateway_pb2 import DltRecordEvent
from dlt.connector.client.DltGatewayClient import DltGatewayClient
from dlt.connector.client.DltEventsCollector import DltEventsCollector
@@ -34,12 +22,21 @@ from .play_ground import PlayGround
DLT_GATEWAY_HOST     = '172.254.254.2'
DLT_GATEWAY_PORT     = 50051

NUM_ACTIONS = 100
NUM_INITIAL_DEVICES  = 20
NUM_INITIAL_LINKS    = 20
NUM_INITIAL_SERVICES = 20
NUM_INITIAL_SLICES   = 20

NUM_ACTIONS          = 1000
REPORT_EVERY         = 5
DELAY_FOR_EVENTS     = 5
DOMAIN_UUID          = 'perf-test-fake-domain'

CSV_FILEPATH = 'data/perf/scenario_2/dlt/2023-05May-30/response_time'
OUTPUT_FOLDER        = 'data/perf/scenario_2/dlt/2023-05May-31'
CSV_FILEPATH         = OUTPUT_FOLDER + '/response_time.csv'

logging.basicConfig(level=logging.INFO)
LOG_FORMAT = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
LOGGER = logging.getLogger(__name__)

def _event_handler(play_ground : PlayGround, event : DltRecordEvent) -> None:
@@ -58,7 +55,7 @@ def _event_handler(play_ground : PlayGround, event : DltRecordEvent) -> None:

    #event_time = event.event.timestamp.timestamp
    record_uuid = event.record_id.record_uuid.uuid
    play_ground.perf_data.get(action, record_type, record_uuid).set_time_notified(time.time())
    play_ground.perf_data.add_notif_time(action, record_type, record_uuid, time.time())
    return None

def main() -> None:
@@ -73,22 +70,45 @@ def main() -> None:

    time.sleep(3)

    LOGGER.info('Starting {:d} actions...'.format(NUM_ACTIONS))
    LOGGER.info('Adding {:d} initial devices...'.format(NUM_INITIAL_DEVICES))
    for _ in range(NUM_INITIAL_DEVICES):
        play_ground.create_device()

    LOGGER.info('Adding {:d} initial links...'.format(NUM_INITIAL_LINKS))
    for _ in range(NUM_INITIAL_LINKS):
        play_ground.create_link()

    LOGGER.info('Adding {:d} initial services...'.format(NUM_INITIAL_SERVICES))
    for _ in range(NUM_INITIAL_SERVICES):
        play_ground.create_service()

    LOGGER.info('Adding {:d} initial slices...'.format(NUM_INITIAL_SLICES))
    for _ in range(NUM_INITIAL_SLICES):
        play_ground.create_slice()

    # Otherwise, only get/update/delete is performed and play_ground becomes empty
    play_ground.perf_data.clear_operation_counters()

    LOGGER.info('Starting {:d} actions...'.format(NUM_ACTIONS))
    num_action = 0
    while num_action < NUM_ACTIONS:
        if num_action > 0 and num_action % 10 == 0:
        if num_action > 0 and num_action % REPORT_EVERY == 0:
            str_stats = play_ground.perf_data.stats_to_str()
            MSG = 'Running action {:d}/{:d}...\n{:s}'
            LOGGER.info(MSG.format(num_action, NUM_ACTIONS, str_stats))
        completed = play_ground.run_random_operation()
        if not completed: continue
        num_action += 1
        if completed: num_action += 1

    str_stats = play_ground.perf_data.stats_to_str()
    LOGGER.info('Completed {:d} actions!\n{:s}'.format(NUM_ACTIONS, str_stats))

    LOGGER.info('Completed {:d} actions!'.format(NUM_ACTIONS))
    LOGGER.info('Waiting {:f} for last events...'.format(DELAY_FOR_EVENTS))
    time.sleep(DELAY_FOR_EVENTS)

    dltgateway_collector.stop()

    LOGGER.info('Writing results...')
    pathlib.Path(OUTPUT_FOLDER).mkdir(parents=True, exist_ok=True)
    play_ground.perf_data.to_csv(CSV_FILEPATH)
    return 0

+50 −9
Original line number Diff line number Diff line
@@ -60,6 +60,25 @@ def dlt_record_set(
    reply = dltgateway_client.RecordToDlt(dlt_req)
    perf_point.set_time_replied(time.time())

    perf_point.set_size_bytes(len(dlt_req.data_json))   # pylint: disable=no-member
    if isinstance(objekt, Device):
        perf_point.set_num_config_rules(len(objekt.device_config.config_rules))
        perf_point.set_num_endpoints(len(objekt.device_endpoints))
    elif isinstance(objekt, Link):
        perf_point.set_num_endpoints(len(objekt.link_endpoint_ids))
    elif isinstance(objekt, Service):
        perf_point.set_num_config_rules(len(objekt.service_config.config_rules))
        perf_point.set_num_constraints(len(objekt.service_constraints))
        perf_point.set_num_endpoints(len(objekt.service_endpoint_ids))
    elif isinstance(objekt, Slice):
        perf_point.set_num_config_rules(len(objekt.slice_config.config_rules))
        perf_point.set_num_constraints(len(objekt.slice_constraints))
        perf_point.set_num_endpoints(len(objekt.slice_endpoint_ids))
        perf_point.set_num_sub_services(len(objekt.slice_service_ids))
        perf_point.set_num_sub_slices(len(objekt.slice_subslice_ids))
    else:
        raise NotImplementedError('Object({:s}) not supported'.format(str(type(objekt))))

    return reply

def dlt_record_found(record : DltRecord) -> bool:
@@ -81,12 +100,34 @@ def dlt_record_get(
    dlt_rec_id.record_uuid.uuid = record_uuid   # pylint: disable=no-member

    perf_point.set_time_requested(time.time())
    reply = dltgateway_client.GetFromDlt(dlt_rec_id)
    dlt_rep = dltgateway_client.GetFromDlt(dlt_rec_id)
    perf_point.set_time_replied(time.time())

    if dlt_record_found(reply): return json.loads(reply.data_json)

    if not dlt_record_found(dlt_rep):
        MSG = 'DltRecord({:s}/{:s}/{:s}) not found'
        str_record_type = DltRecordTypeEnum.Name(record_type)
        msg = MSG.format(str(domain_uuid), str_record_type, str(record_uuid))
        raise Exception(msg) # pylint: disable=broad-exception-raised

    data : Dict = json.loads(dlt_rep.data_json)

    perf_point.set_size_bytes(len(dlt_rep.data_json))
    if 'device_id' in data:
        perf_point.set_num_config_rules(len(data.get('device_config', {}).get('config_rules')))
        perf_point.set_num_endpoints(len(data.get('device_endpoints', [])))
    elif 'link_id' in data:
        perf_point.set_num_endpoints(len(data.get('link_endpoint_ids', [])))
    elif 'service_id' in data:
        perf_point.set_num_config_rules(len(data.get('service_config', []).get('config_rules')))
        perf_point.set_num_constraints(len(data.get('service_constraints', [])))
        perf_point.set_num_endpoints(len(data.get('service_endpoint_ids', [])))
    elif 'slice_id' in data:
        perf_point.set_num_config_rules(len(data.get('slice_config', []).get('config_rules')))
        perf_point.set_num_constraints(len(data.get('slice_constraints', [])))
        perf_point.set_num_endpoints(len(data.get('slice_endpoint_ids', [])))
        perf_point.set_num_sub_services(len(data.get('slice_service_ids', [])))
        perf_point.set_num_sub_slices(len(data.get('slice_subslice_ids', [])))
    else:
        raise NotImplementedError('Object({:s}) not supported'.format(str(data)))

    return data
Loading