perfTest.js

    // Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
    //
    // Licensed under the Apache License, Version 2.0 (the "License");
    // you may not use this file except in compliance with the License.
    // You may obtain a copy of the License at
    //
    //      http://www.apache.org/licenses/LICENSE-2.0
    //
    // Unless required by applicable law or agreed to in writing, software
    // distributed under the License is distributed on an "AS IS" BASIS,
    // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
    // See the License for the specific language governing permissions and
    // limitations under the License.
    
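    // Benchmark script: for each sample topology file, repeatedly runs the DLT
    // chaincode operations (store, update, fetch, delete, fetch-all) against the
    // Fabric network exposed by fabricConnect and logs how long each one takes.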
    const { connectToNetwork } = require('../dltApp/dist/fabricConnect');
    const fsp = require('fs').promises;
    const fs = require('fs');
    
    const utf8Decoder = new TextDecoder();
    const topoDirectory = '../samples/';
    const topologies = ['topo1.json', 'topo2.json', 'topo3.json', 'topo4.json'];
    //const topologies = ['topo4.json'];
    
    const iterations = 1000;
    
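    // Entry point: connects to the network once, then runs the full operation
    // cycle `iterations` times for every topology file.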
    async function main() {
        try {
            const { contract, close } = await connectToNetwork();
            for (const topoFile of topologies) {
                const logFilePath = `./operation_times_${topoFile.split('.')[0]}.txt`; // Creates a separate logfile for each topology
                const appendFile = (entry) => fsp.appendFile(logFilePath, entry); // Append via the promise-based fs API
                
                console.log(`Starting tests for ${topoFile}`);
                for (let i = 0; i < iterations; i++) {
                    console.log(`Iteration ${i + 1} for ${topoFile}`);
                    await runBlockchainOperations(contract, topoFile, appendFile); 
                }
            }
            await close(); // Clean up the connection
        } catch (error) {
            console.error('An error occurred:', error);
        }
    }
    
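    // Executes one STORE/UPDATE/FETCH/DELETE/FETCH_ALL cycle for a single
    // topology file, using a fresh asset ID for the run.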
    async function runBlockchainOperations(contract, topoFile, appendFile) {
        const assetId = `asset${Date.now()}`;
        const jsonData = await readJsonData(`${topoDirectory}${topoFile}`);
    
        // Define operations
        const operations = [
            { type: 'STORE', jsonData },
            { type: 'UPDATE', jsonData },
            { type: 'FETCH', jsonData: null },
            { type: 'DELETE', jsonData: null },
            { type: 'FETCH_ALL', jsonData: null }
        ];
    
        for (let op of operations) {
            await executeOperation(contract, op.type, assetId, op.jsonData, appendFile);
        }
    }
    
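    // Reads a topology JSON file; returns an empty JSON object on failure so
    // the benchmark can keep going.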
    async function readJsonData(filePath) {
        try {
            return await fsp.readFile(filePath, 'utf8');
        } catch (error) {
            console.error(`Failed to read file: ${filePath}`, error);
            return '{}';
        }
    }
    
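    // Submits (writes) or evaluates (reads) the chaincode transaction for the
    // given operation type, then records and logs its execution time.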
    async function executeOperation(contract, operationType, assetId, jsonData, appendFile) {
        const startTime = process.hrtime.bigint();
        try {
            let result;
            switch (operationType) {
                case 'STORE':
                    result = await contract.submitTransaction('StoreTopoData', assetId, jsonData);
                    break;
                case 'UPDATE':
                    result = await contract.submitTransaction('UpdateTopoData', assetId, jsonData);
                    break;
                case 'FETCH':
                    result = await contract.evaluateTransaction('RetrieveTopoData', assetId);
                    break;
                case 'DELETE':
                    result = await contract.submitTransaction('DeleteTopo', assetId);
                    break;
                case 'FETCH_ALL':
                    result = await contract.evaluateTransaction('GetAllInfo');
                    break;
            }
            result = utf8Decoder.decode(result);
            const operationTime = recordOperationTime(startTime);
            await logOperationTime(operationTime, operationType, appendFile);
            console.log(`${operationType} Result:`, result);
        } catch (error) {
            console.error(`Error during ${operationType}:`, error);
        }
    }
    
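    // Returns the elapsed time since startTime, in milliseconds.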
    function recordOperationTime(startTime) {
        const endTime = process.hrtime.bigint();
        const operationTime = Number(endTime - startTime) / 1e6;
        return operationTime;
    }
    
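    // Appends a timestamped entry with the operation type and its duration to
    // the per-topology log file.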
    async function logOperationTime(operationTime, operationType, appendFile) {
        const timestamp = Date.now();
        const logEntry = `${timestamp} - ${operationType} - Execution time: ${operationTime.toFixed(3)} ms\n`;
        try {
            await appendFile(logEntry);
        } catch (error) {
            console.error('Error writing to log file:', error);
        }
    }
    
    main();