From 079238772061cfcaeb357001b9713a4095eb54cb Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 06:20:46 +0000 Subject: [PATCH 01/12] Policy component: - Fixed missing IETF_L2VPN entry in DeviceDriverEnum --- .../eu/teraflow/policy/context/model/DeviceDriverEnum.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/policy/src/main/java/eu/teraflow/policy/context/model/DeviceDriverEnum.java b/src/policy/src/main/java/eu/teraflow/policy/context/model/DeviceDriverEnum.java index daee299dd..ad763e35d 100644 --- a/src/policy/src/main/java/eu/teraflow/policy/context/model/DeviceDriverEnum.java +++ b/src/policy/src/main/java/eu/teraflow/policy/context/model/DeviceDriverEnum.java @@ -23,5 +23,6 @@ public enum DeviceDriverEnum { P4, IETF_NETWORK_TOPOLOGY, ONF_TR_352, - XR + XR, + IETF_L2VPN } -- GitLab From 89374bf9680ef181f08e746887a02e34c86ed9f7 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 06:53:57 +0000 Subject: [PATCH 02/12] PathComp component - Frontend: - Fixed unitary tests --- src/pathcomp/frontend/tests/Objects_A_B_C.py | 70 ++++++++++----- .../frontend/tests/Objects_DC_CSGW_TN.py | 88 +++++++++++------- .../frontend/tests/Objects_DC_CSGW_TN_OLS.py | 89 ++++++++++++------- src/pathcomp/frontend/tests/test_unitary.py | 63 +++++++------ 4 files changed, 194 insertions(+), 116 deletions(-) diff --git a/src/pathcomp/frontend/tests/Objects_A_B_C.py b/src/pathcomp/frontend/tests/Objects_A_B_C.py index f26d74ce4..5290123b6 100644 --- a/src/pathcomp/frontend/tests/Objects_A_B_C.py +++ b/src/pathcomp/frontend/tests/Objects_A_B_C.py @@ -80,21 +80,36 @@ DEVICE_C3_ID, DEVICE_C3_ENDPOINTS, DEVICE_C3 = compose_device('C3', ['1', '2', ' LINK_A2_C3_ID, LINK_A2_C3 = compose_link(DEVICE_A2_ENDPOINTS[2], DEVICE_C3_ENDPOINTS[2]) LINK_C1_B2_ID, LINK_C1_B2 = compose_link(DEVICE_C1_ENDPOINTS[2], DEVICE_B2_ENDPOINTS[2]) +LINK_C3_A2_ID, LINK_C3_A2 = compose_link(DEVICE_C3_ENDPOINTS[2], DEVICE_A2_ENDPOINTS[2]) +LINK_B2_C1_ID, LINK_B2_C1 = compose_link(DEVICE_B2_ENDPOINTS[2], DEVICE_C1_ENDPOINTS[2]) + # ----- IntraDomain A Links -------------------------------------------------------------------------------------------- LINK_A1_A2_ID, LINK_A1_A2 = compose_link(DEVICE_A1_ENDPOINTS[0], DEVICE_A2_ENDPOINTS[0]) LINK_A1_A3_ID, LINK_A1_A3 = compose_link(DEVICE_A1_ENDPOINTS[1], DEVICE_A3_ENDPOINTS[0]) LINK_A2_A3_ID, LINK_A2_A3 = compose_link(DEVICE_A2_ENDPOINTS[1], DEVICE_A3_ENDPOINTS[1]) +LINK_A2_A1_ID, LINK_A2_A1 = compose_link(DEVICE_A2_ENDPOINTS[0], DEVICE_A1_ENDPOINTS[0]) +LINK_A3_A1_ID, LINK_A3_A1 = compose_link(DEVICE_A3_ENDPOINTS[0], DEVICE_A1_ENDPOINTS[1]) +LINK_A3_A2_ID, LINK_A3_A2 = compose_link(DEVICE_A3_ENDPOINTS[1], DEVICE_A2_ENDPOINTS[1]) + # ----- IntraDomain B Links -------------------------------------------------------------------------------------------- LINK_B1_B2_ID, LINK_B1_B2 = compose_link(DEVICE_B1_ENDPOINTS[0], DEVICE_B2_ENDPOINTS[0]) LINK_B1_B3_ID, LINK_B1_B3 = compose_link(DEVICE_B1_ENDPOINTS[1], DEVICE_B3_ENDPOINTS[0]) LINK_B2_B3_ID, LINK_B2_B3 = compose_link(DEVICE_B2_ENDPOINTS[1], DEVICE_B3_ENDPOINTS[1]) +LINK_B2_B1_ID, LINK_B2_B1 = compose_link(DEVICE_B2_ENDPOINTS[0], DEVICE_B1_ENDPOINTS[0]) +LINK_B3_B1_ID, LINK_B3_B1 = compose_link(DEVICE_B3_ENDPOINTS[0], DEVICE_B1_ENDPOINTS[1]) +LINK_B3_B2_ID, LINK_B3_B2 = compose_link(DEVICE_B3_ENDPOINTS[1], DEVICE_B2_ENDPOINTS[1]) + # ----- IntraDomain C Links -------------------------------------------------------------------------------------------- LINK_C1_C2_ID, LINK_C1_C2 = 
compose_link(DEVICE_C1_ENDPOINTS[0], DEVICE_C2_ENDPOINTS[0]) LINK_C1_C3_ID, LINK_C1_C3 = compose_link(DEVICE_C1_ENDPOINTS[1], DEVICE_C3_ENDPOINTS[0]) LINK_C2_C3_ID, LINK_C2_C3 = compose_link(DEVICE_C2_ENDPOINTS[1], DEVICE_C3_ENDPOINTS[1]) +LINK_C2_C1_ID, LINK_C2_C1 = compose_link(DEVICE_C2_ENDPOINTS[0], DEVICE_C1_ENDPOINTS[0]) +LINK_C3_C1_ID, LINK_C3_C1 = compose_link(DEVICE_C3_ENDPOINTS[0], DEVICE_C1_ENDPOINTS[1]) +LINK_C3_C2_ID, LINK_C3_C2 = compose_link(DEVICE_C3_ENDPOINTS[1], DEVICE_C2_ENDPOINTS[1]) + # ----- Service -------------------------------------------------------------------------------------------------------- SERVICE_A1_B1 = compose_service(DEVICE_A1_ENDPOINTS[2], DEVICE_B1_ENDPOINTS[2], constraints=[ json_constraint_sla_capacity(10.0), @@ -108,31 +123,38 @@ DEVICES = [ DEVICE_A1, DEVICE_A2, DEVICE_A3, DEVICE_B1, DEVICE_B2, DEVICE_B3, DEVICE_C1, DEVICE_C2, DEVICE_C3, ] LINKS = [ LINK_A2_C3, LINK_C1_B2, + LINK_C3_A2, LINK_B2_C1, + LINK_A1_A2, LINK_A1_A3, LINK_A2_A3, + LINK_A2_A1, LINK_A3_A1, LINK_A3_A2, + LINK_B1_B2, LINK_B1_B3, LINK_B2_B3, - LINK_C1_C2, LINK_C1_C3, LINK_C2_C3, ] + LINK_B2_B1, LINK_B3_B1, LINK_B3_B2, + + LINK_C1_C2, LINK_C1_C3, LINK_C2_C3, + LINK_C2_C1, LINK_C3_C1, LINK_C3_C2, ] SERVICES = [ SERVICE_A1_B1] -OBJECTS_PER_TOPOLOGY = [ - (TOPOLOGY_ADMIN_ID, - [ DEVICE_A1_ID, DEVICE_A2_ID, DEVICE_A3_ID, - DEVICE_B1_ID, DEVICE_B2_ID, DEVICE_B3_ID, - DEVICE_C1_ID, DEVICE_C2_ID, DEVICE_C3_ID, ], - [ LINK_A2_C3_ID, LINK_C1_B2_ID, - LINK_A1_A2_ID, LINK_A1_A3_ID, LINK_A2_A3_ID, - LINK_B1_B2_ID, LINK_B1_B3_ID, LINK_B2_B3_ID, - LINK_C1_C2_ID, LINK_C1_C3_ID, LINK_C2_C3_ID, ], - ), - (TOPOLOGY_A_ID, - [ DEVICE_A1_ID, DEVICE_A2_ID, DEVICE_A3_ID, ], - [ LINK_A1_A2_ID, LINK_A1_A3_ID, LINK_A2_A3_ID, ], - ), - (TOPOLOGY_B_ID, - [ DEVICE_B1_ID, DEVICE_B2_ID, DEVICE_B3_ID, ], - [ LINK_B1_B2_ID, LINK_B1_B3_ID, LINK_B2_B3_ID, ], - ), - (TOPOLOGY_C_ID, - [ DEVICE_C1_ID, DEVICE_C2_ID, DEVICE_C3_ID, ], - [ LINK_C1_C2_ID, LINK_C1_C3_ID, LINK_C2_C3_ID, ], - ), -] +#OBJECTS_PER_TOPOLOGY = [ +# (TOPOLOGY_ADMIN_ID, +# [ DEVICE_A1_ID, DEVICE_A2_ID, DEVICE_A3_ID, +# DEVICE_B1_ID, DEVICE_B2_ID, DEVICE_B3_ID, +# DEVICE_C1_ID, DEVICE_C2_ID, DEVICE_C3_ID, ], +# [ LINK_A2_C3_ID, LINK_C1_B2_ID, +# LINK_A1_A2_ID, LINK_A1_A3_ID, LINK_A2_A3_ID, +# LINK_B1_B2_ID, LINK_B1_B3_ID, LINK_B2_B3_ID, +# LINK_C1_C2_ID, LINK_C1_C3_ID, LINK_C2_C3_ID, ], +# ), +# (TOPOLOGY_A_ID, +# [ DEVICE_A1_ID, DEVICE_A2_ID, DEVICE_A3_ID, ], +# [ LINK_A1_A2_ID, LINK_A1_A3_ID, LINK_A2_A3_ID, ], +# ), +# (TOPOLOGY_B_ID, +# [ DEVICE_B1_ID, DEVICE_B2_ID, DEVICE_B3_ID, ], +# [ LINK_B1_B2_ID, LINK_B1_B3_ID, LINK_B2_B3_ID, ], +# ), +# (TOPOLOGY_C_ID, +# [ DEVICE_C1_ID, DEVICE_C2_ID, DEVICE_C3_ID, ], +# [ LINK_C1_C2_ID, LINK_C1_C3_ID, LINK_C2_C3_ID, ], +# ), +#] diff --git a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py index 9ee784e1f..053dfd4c4 100644 --- a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py +++ b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN.py @@ -118,6 +118,11 @@ LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1 LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) +LINK_CS1GW1_DC1GW_ID, LINK_CS1GW1_DC1GW = compose_link(DEV_CS1GW1_EPS[0], DEV_DC1GW_EPS[0]) +LINK_CS1GW2_DC1GW_ID, LINK_CS1GW2_DC1GW = compose_link(DEV_CS1GW2_EPS[0], DEV_DC1GW_EPS[1]) +LINK_CS2GW1_DC2GW_ID, LINK_CS2GW1_DC2GW = 
compose_link(DEV_CS2GW1_EPS[0], DEV_DC2GW_EPS[0]) +LINK_CS2GW2_DC2GW_ID, LINK_CS2GW2_DC2GW = compose_link(DEV_CS2GW2_EPS[0], DEV_DC2GW_EPS[1]) + # InterDomain CSGW-TN LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) @@ -128,6 +133,15 @@ LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4 LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) +LINK_TNR1_CS1GW1_ID, LINK_TNR1_CS1GW1 = compose_link(DEV_TNR1_EPS[0], DEV_CS1GW1_EPS[1]) +LINK_TNR2_CS1GW2_ID, LINK_TNR2_CS1GW2 = compose_link(DEV_TNR2_EPS[0], DEV_CS1GW2_EPS[1]) +LINK_TNR2_CS1GW1_ID, LINK_TNR2_CS1GW1 = compose_link(DEV_TNR2_EPS[1], DEV_CS1GW1_EPS[2]) +LINK_TNR1_CS1GW2_ID, LINK_TNR1_CS1GW2 = compose_link(DEV_TNR1_EPS[1], DEV_CS1GW2_EPS[2]) +LINK_TNR3_CS2GW1_ID, LINK_TNR3_CS2GW1 = compose_link(DEV_TNR3_EPS[0], DEV_CS2GW1_EPS[1]) +LINK_TNR4_CS2GW2_ID, LINK_TNR4_CS2GW2 = compose_link(DEV_TNR4_EPS[0], DEV_CS2GW2_EPS[1]) +LINK_TNR4_CS2GW1_ID, LINK_TNR4_CS2GW1 = compose_link(DEV_TNR4_EPS[1], DEV_CS2GW1_EPS[2]) +LINK_TNR3_CS2GW2_ID, LINK_TNR3_CS2GW2 = compose_link(DEV_TNR3_EPS[1], DEV_CS2GW2_EPS[2]) + # IntraDomain TN LINK_TNR1_TNR2_ID, LINK_TNR1_TNR2 = compose_link(DEV_TNR1_EPS[2], DEV_TNR2_EPS[3]) LINK_TNR2_TNR3_ID, LINK_TNR2_TNR3 = compose_link(DEV_TNR2_EPS[2], DEV_TNR3_EPS[3]) @@ -136,6 +150,13 @@ LINK_TNR4_TNR1_ID, LINK_TNR4_TNR1 = compose_link(DEV_TNR4_EPS[2], DEV_TNR1_EPS[3 LINK_TNR1_TNR3_ID, LINK_TNR1_TNR3 = compose_link(DEV_TNR1_EPS[4], DEV_TNR3_EPS[4]) LINK_TNR2_TNR4_ID, LINK_TNR2_TNR4 = compose_link(DEV_TNR2_EPS[4], DEV_TNR4_EPS[4]) +LINK_TNR2_TNR1_ID, LINK_TNR2_TNR1 = compose_link(DEV_TNR2_EPS[3], DEV_TNR1_EPS[2]) +LINK_TNR3_TNR2_ID, LINK_TNR3_TNR2 = compose_link(DEV_TNR3_EPS[3], DEV_TNR2_EPS[2]) +LINK_TNR4_TNR3_ID, LINK_TNR4_TNR3 = compose_link(DEV_TNR4_EPS[3], DEV_TNR3_EPS[2]) +LINK_TNR1_TNR4_ID, LINK_TNR1_TNR4 = compose_link(DEV_TNR1_EPS[3], DEV_TNR4_EPS[2]) +LINK_TNR3_TNR1_ID, LINK_TNR3_TNR1 = compose_link(DEV_TNR3_EPS[4], DEV_TNR1_EPS[4]) +LINK_TNR4_TNR2_ID, LINK_TNR4_TNR2 = compose_link(DEV_TNR4_EPS[4], DEV_TNR2_EPS[4]) + # ----- Service -------------------------------------------------------------------------------------------------------- SERVICE_DC1GW_DC2GW = compose_service(DEV_DC1GW_EPS[2], DEV_DC2GW_EPS[2], constraints=[ @@ -151,41 +172,44 @@ DEVICES = [ DEV_DC1GW, DEV_DC2GW, DEV_TNR1, DEV_TNR2, DEV_TNR3, DEV_TNR4, ] LINKS = [ LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2, + LINK_CS1GW1_DC1GW, LINK_CS1GW2_DC1GW, LINK_CS2GW1_DC2GW, LINK_CS2GW2_DC2GW, + LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1, LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3, LINK_TNR1_TNR2, LINK_TNR2_TNR3, LINK_TNR3_TNR4, LINK_TNR4_TNR1, LINK_TNR1_TNR3, LINK_TNR2_TNR4, + LINK_TNR2_TNR1, LINK_TNR3_TNR2, LINK_TNR4_TNR3, LINK_TNR1_TNR4, LINK_TNR3_TNR1, LINK_TNR4_TNR2, ] SERVICES = [ SERVICE_DC1GW_DC2GW ] -OBJECTS_PER_TOPOLOGY = [ - (TOPO_ADMIN_ID, - [ DEV_DC1GW_ID, DEV_DC2GW_ID, - DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, - DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - ], - [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, - LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, - LINK_CS2GW1_TNR3_ID, 
LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, - LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, - LINK_TNR2_TNR4_ID, - ], - ), - (TOPO_DC1_ID, - [DEV_DC1GW_ID], - []), - (TOPO_DC2_ID, - [DEV_DC2GW_ID], - []), - (TOPO_CS1_ID, - [DEV_CS1GW1_ID, DEV_CS1GW2_ID], - []), - (TOPO_CS2_ID, - [DEV_CS2GW1_ID, DEV_CS2GW2_ID], - []), - (TOPO_TN_ID, - [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - ], - [ LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, - LINK_TNR2_TNR4_ID, - ]), -] +#OBJECTS_PER_TOPOLOGY = [ +# (TOPO_ADMIN_ID, +# [ DEV_DC1GW_ID, DEV_DC2GW_ID, +# DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, +# DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, +# ], +# [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, +# LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, +# LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, +# LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, +# LINK_TNR2_TNR4_ID, +# ], +# ), +# (TOPO_DC1_ID, +# [DEV_DC1GW_ID], +# []), +# (TOPO_DC2_ID, +# [DEV_DC2GW_ID], +# []), +# (TOPO_CS1_ID, +# [DEV_CS1GW1_ID, DEV_CS1GW2_ID], +# []), +# (TOPO_CS2_ID, +# [DEV_CS2GW1_ID, DEV_CS2GW2_ID], +# []), +# (TOPO_TN_ID, +# [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, +# ], +# [ LINK_TNR1_TNR2_ID, LINK_TNR2_TNR3_ID, LINK_TNR3_TNR4_ID, LINK_TNR4_TNR1_ID, LINK_TNR1_TNR3_ID, +# LINK_TNR2_TNR4_ID, +# ]), +#] diff --git a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py index 71510d088..2c8428568 100644 --- a/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py +++ b/src/pathcomp/frontend/tests/Objects_DC_CSGW_TN_OLS.py @@ -130,6 +130,11 @@ LINK_DC1GW_CS1GW2_ID, LINK_DC1GW_CS1GW2 = compose_link(DEV_DC1GW_EPS[1], DEV_CS1 LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW1 = compose_link(DEV_DC2GW_EPS[0], DEV_CS2GW1_EPS[0]) LINK_DC2GW_CS2GW2_ID, LINK_DC2GW_CS2GW2 = compose_link(DEV_DC2GW_EPS[1], DEV_CS2GW2_EPS[0]) +LINK_CS1GW1_DC1GW_ID, LINK_CS1GW1_DC1GW = compose_link(DEV_CS1GW1_EPS[0], DEV_DC1GW_EPS[0]) +LINK_CS1GW2_DC1GW_ID, LINK_CS1GW2_DC1GW = compose_link(DEV_CS1GW2_EPS[0], DEV_DC1GW_EPS[1]) +LINK_CS2GW1_DC2GW_ID, LINK_CS2GW1_DC2GW = compose_link(DEV_CS2GW1_EPS[0], DEV_DC2GW_EPS[0]) +LINK_CS2GW2_DC2GW_ID, LINK_CS2GW2_DC2GW = compose_link(DEV_CS2GW2_EPS[0], DEV_DC2GW_EPS[1]) + # InterDomain CSGW-TN LINK_CS1GW1_TNR1_ID, LINK_CS1GW1_TNR1 = compose_link(DEV_CS1GW1_EPS[1], DEV_TNR1_EPS[0]) LINK_CS1GW2_TNR2_ID, LINK_CS1GW2_TNR2 = compose_link(DEV_CS1GW2_EPS[1], DEV_TNR2_EPS[0]) @@ -140,12 +145,26 @@ LINK_CS2GW2_TNR4_ID, LINK_CS2GW2_TNR4 = compose_link(DEV_CS2GW2_EPS[1], DEV_TNR4 LINK_CS2GW1_TNR4_ID, LINK_CS2GW1_TNR4 = compose_link(DEV_CS2GW1_EPS[2], DEV_TNR4_EPS[1]) LINK_CS2GW2_TNR3_ID, LINK_CS2GW2_TNR3 = compose_link(DEV_CS2GW2_EPS[2], DEV_TNR3_EPS[1]) +LINK_TNR1_CS1GW1_ID, LINK_TNR1_CS1GW1 = compose_link(DEV_TNR1_EPS[0], DEV_CS1GW1_EPS[1]) +LINK_TNR2_CS1GW2_ID, LINK_TNR2_CS1GW2 = compose_link(DEV_TNR2_EPS[0], DEV_CS1GW2_EPS[1]) +LINK_TNR2_CS1GW1_ID, LINK_TNR2_CS1GW1 = compose_link(DEV_TNR2_EPS[1], DEV_CS1GW1_EPS[2]) +LINK_TNR1_CS1GW2_ID, LINK_TNR1_CS1GW2 = compose_link(DEV_TNR1_EPS[1], DEV_CS1GW2_EPS[2]) +LINK_TNR3_CS2GW1_ID, LINK_TNR3_CS2GW1 = compose_link(DEV_TNR3_EPS[0], DEV_CS2GW1_EPS[1]) +LINK_TNR4_CS2GW2_ID, LINK_TNR4_CS2GW2 = compose_link(DEV_TNR4_EPS[0], 
DEV_CS2GW2_EPS[1]) +LINK_TNR4_CS2GW1_ID, LINK_TNR4_CS2GW1 = compose_link(DEV_TNR4_EPS[1], DEV_CS2GW1_EPS[2]) +LINK_TNR3_CS2GW2_ID, LINK_TNR3_CS2GW2 = compose_link(DEV_TNR3_EPS[1], DEV_CS2GW2_EPS[2]) + # IntraDomain TN LINK_TNR1_TOLS_ID, LINK_TNR1_TOLS = compose_link(DEV_TNR1_EPS[2], DEV_TOLS_EPS[0]) LINK_TNR2_TOLS_ID, LINK_TNR2_TOLS = compose_link(DEV_TNR2_EPS[2], DEV_TOLS_EPS[1]) LINK_TNR3_TOLS_ID, LINK_TNR3_TOLS = compose_link(DEV_TNR3_EPS[2], DEV_TOLS_EPS[2]) LINK_TNR4_TOLS_ID, LINK_TNR4_TOLS = compose_link(DEV_TNR4_EPS[2], DEV_TOLS_EPS[3]) +LINK_TOLS_TNR1_ID, LINK_TOLS_TNR1 = compose_link(DEV_TOLS_EPS[0], DEV_TNR1_EPS[2]) +LINK_TOLS_TNR2_ID, LINK_TOLS_TNR2 = compose_link(DEV_TOLS_EPS[1], DEV_TNR2_EPS[2]) +LINK_TOLS_TNR3_ID, LINK_TOLS_TNR3 = compose_link(DEV_TOLS_EPS[2], DEV_TNR3_EPS[2]) +LINK_TOLS_TNR4_ID, LINK_TOLS_TNR4 = compose_link(DEV_TOLS_EPS[3], DEV_TNR4_EPS[2]) + # ----- Service -------------------------------------------------------------------------------------------------------- SERVICE_DC1GW_DC2GW = compose_service(DEV_DC1GW_EPS[2], DEV_DC2GW_EPS[2], constraints=[ @@ -162,41 +181,47 @@ DEVICES = [ DEV_DC1GW, DEV_DC2GW, DEV_TOLS, ] LINKS = [ LINK_DC1GW_CS1GW1, LINK_DC1GW_CS1GW2, LINK_DC2GW_CS2GW1, LINK_DC2GW_CS2GW2, + LINK_CS1GW1_DC1GW, LINK_CS1GW2_DC1GW, LINK_CS2GW1_DC2GW, LINK_CS2GW2_DC2GW, + LINK_CS1GW1_TNR1, LINK_CS1GW2_TNR2, LINK_CS1GW1_TNR2, LINK_CS1GW2_TNR1, LINK_CS2GW1_TNR3, LINK_CS2GW2_TNR4, LINK_CS2GW1_TNR4, LINK_CS2GW2_TNR3, + LINK_TNR1_CS1GW1, LINK_TNR2_CS1GW2, LINK_TNR2_CS1GW1, LINK_TNR1_CS1GW2, + LINK_TNR3_CS2GW1, LINK_TNR4_CS2GW2, LINK_TNR4_CS2GW1, LINK_TNR3_CS2GW2, + LINK_TNR1_TOLS, LINK_TNR2_TOLS, LINK_TNR3_TOLS, LINK_TNR4_TOLS, + LINK_TOLS_TNR1, LINK_TOLS_TNR2, LINK_TOLS_TNR3, LINK_TOLS_TNR4, ] SERVICES = [ SERVICE_DC1GW_DC2GW ] -OBJECTS_PER_TOPOLOGY = [ - (TOPO_ADMIN_ID, - [ DEV_DC1GW_ID, DEV_DC2GW_ID, - DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, - DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - DEV_TOLS_ID, - ], - [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, - LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, - LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, - LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, - ], - ), - (TOPO_DC1_ID, - [DEV_DC1GW_ID], - []), - (TOPO_DC2_ID, - [DEV_DC2GW_ID], - []), - (TOPO_CS1_ID, - [DEV_CS1GW1_ID, DEV_CS1GW2_ID], - []), - (TOPO_CS2_ID, - [DEV_CS2GW1_ID, DEV_CS2GW2_ID], - []), - (TOPO_TN_ID, - [ DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, - DEV_TOLS_ID, - ], - [ LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, - ]), -] +#OBJECTS_PER_TOPOLOGY = [ +# (TOPO_ADMIN_ID, +# [ DEV_DC1GW_ID, DEV_DC2GW_ID, +# DEV_CS1GW1_ID, DEV_CS1GW2_ID, DEV_CS2GW1_ID, DEV_CS2GW2_ID, +# DEV_TNR1_ID, DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, +# DEV_TOLS_ID, +# ], +# [ LINK_DC1GW_CS1GW1_ID, LINK_DC1GW_CS1GW2_ID, LINK_DC2GW_CS2GW1_ID, LINK_DC2GW_CS2GW2_ID, +# LINK_CS1GW1_TNR1_ID, LINK_CS1GW2_TNR2_ID, LINK_CS1GW1_TNR2_ID, LINK_CS1GW2_TNR1_ID, +# LINK_CS2GW1_TNR3_ID, LINK_CS2GW2_TNR4_ID, LINK_CS2GW1_TNR4_ID, LINK_CS2GW2_TNR3_ID, +# LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, +# ], +# ), +# (TOPO_DC1_ID, +# [DEV_DC1GW_ID], +# []), +# (TOPO_DC2_ID, +# [DEV_DC2GW_ID], +# []), +# (TOPO_CS1_ID, +# [DEV_CS1GW1_ID, DEV_CS1GW2_ID], +# []), +# (TOPO_CS2_ID, +# [DEV_CS2GW1_ID, DEV_CS2GW2_ID], +# []), +# (TOPO_TN_ID, +# [ DEV_TNR1_ID, 
DEV_TNR2_ID, DEV_TNR3_ID, DEV_TNR4_ID, +# DEV_TOLS_ID, +# ], +# [ LINK_TNR1_TOLS_ID, LINK_TNR2_TOLS_ID, LINK_TNR3_TOLS_ID, LINK_TNR4_TOLS_ID, +# ]), +#] diff --git a/src/pathcomp/frontend/tests/test_unitary.py b/src/pathcomp/frontend/tests/test_unitary.py index 8088259b8..ad03e2626 100644 --- a/src/pathcomp/frontend/tests/test_unitary.py +++ b/src/pathcomp/frontend/tests/test_unitary.py @@ -13,12 +13,15 @@ # limitations under the License. import copy, logging, os +from common.Constants import DEFAULT_CONTEXT_NAME from common.proto.context_pb2 import Context, ContextId, DeviceId, Link, LinkId, Topology, Device, TopologyId from common.proto.pathcomp_pb2 import PathCompRequest +from common.tools.descriptor.Loader import DescriptorLoader, check_descriptor_load_results, validate_empty_scenario from common.tools.grpc.Tools import grpc_message_to_json from common.tools.object_factory.Constraint import ( json_constraint_custom, json_constraint_endpoint_location_region, json_constraint_endpoint_priority, json_constraint_sla_availability, json_constraint_sla_capacity, json_constraint_sla_latency) +from common.tools.object_factory.Context import json_context_id from common.tools.object_factory.Device import json_device_id from common.tools.object_factory.EndPoint import json_endpoint_id from common.tools.object_factory.Service import json_service_l3nm_planned @@ -58,31 +61,29 @@ from .PrepareTestScenario import ( # pylint: disable=unused-import LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.DEBUG) -def test_prepare_environment( - context_client : ContextClient): # pylint: disable=redefined-outer-name - - for context in CONTEXTS : context_client.SetContext (Context (**context )) - for topology in TOPOLOGIES: context_client.SetTopology(Topology(**topology)) - for device in DEVICES : context_client.SetDevice (Device (**device )) - for link in LINKS : context_client.SetLink (Link (**link )) - - for topology_id, device_ids, link_ids in OBJECTS_PER_TOPOLOGY: - topology = Topology() - topology.CopyFrom(context_client.GetTopology(TopologyId(**topology_id))) +ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME)) +DESCRIPTORS = { + 'dummy_mode': True, + 'contexts' : CONTEXTS, + 'topologies': TOPOLOGIES, + 'devices' : DEVICES, + 'links' : LINKS, +} - device_ids_in_topology = {device_id.device_uuid.uuid for device_id in topology.device_ids} - func_device_id_not_added = lambda device_id: device_id['device_uuid']['uuid'] not in device_ids_in_topology - func_device_id_json_to_grpc = lambda device_id: DeviceId(**device_id) - device_ids_to_add = list(map(func_device_id_json_to_grpc, filter(func_device_id_not_added, device_ids))) - topology.device_ids.extend(device_ids_to_add) +def test_prepare_environment( + context_client : ContextClient, # pylint: disable=redefined-outer-name +) -> None: + validate_empty_scenario(context_client) - link_ids_in_topology = {link_id.link_uuid.uuid for link_id in topology.link_ids} - func_link_id_not_added = lambda link_id: link_id['link_uuid']['uuid'] not in link_ids_in_topology - func_link_id_json_to_grpc = lambda link_id: LinkId(**link_id) - link_ids_to_add = list(map(func_link_id_json_to_grpc, filter(func_link_id_not_added, link_ids))) - topology.link_ids.extend(link_ids_to_add) + descriptor_loader = DescriptorLoader(descriptors=DESCRIPTORS, context_client=context_client) + results = descriptor_loader.process() + check_descriptor_load_results(results, descriptor_loader) + descriptor_loader.validate() - context_client.SetTopology(topology) + # Verify the 
scenario has no services/slices + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 def test_request_service_shortestpath( pathcomp_client : PathCompClient): # pylint: disable=redefined-outer-name @@ -266,9 +267,15 @@ def test_request_service_kdisjointpath( def test_cleanup_environment( - context_client : ContextClient): # pylint: disable=redefined-outer-name - - for link in LINKS : context_client.RemoveLink (LinkId (**link ['link_id' ])) - for device in DEVICES : context_client.RemoveDevice (DeviceId (**device ['device_id' ])) - for topology in TOPOLOGIES: context_client.RemoveTopology(TopologyId(**topology['topology_id'])) - for context in CONTEXTS : context_client.RemoveContext (ContextId (**context ['context_id' ])) + context_client : ContextClient, # pylint: disable=redefined-outer-name +) -> None: + # Verify the scenario has no services/slices + response = context_client.GetContext(ADMIN_CONTEXT_ID) + assert len(response.service_ids) == 0 + assert len(response.slice_ids) == 0 + + # Load descriptors and validate the base scenario + descriptor_loader = DescriptorLoader(descriptors=DESCRIPTORS, context_client=context_client) + descriptor_loader.validate() + descriptor_loader.unload() + validate_empty_scenario(context_client) -- GitLab From a0f7721f1af9cb9cc806c35d70335962d1cc7ad2 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 07:04:17 +0000 Subject: [PATCH 03/12] PathComp component - Frontend: - Fixed Dockerfile --- src/pathcomp/frontend/Dockerfile | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/pathcomp/frontend/Dockerfile b/src/pathcomp/frontend/Dockerfile index 352de75f3..9384b3e19 100644 --- a/src/pathcomp/frontend/Dockerfile +++ b/src/pathcomp/frontend/Dockerfile @@ -62,8 +62,14 @@ RUN python3 -m pip install -r requirements.txt # Add component files into working directory WORKDIR /var/teraflow -COPY src/context/. context/ -COPY src/device/. device/ +COPY src/context/__init__.py context/__init__.py +COPY src/context/client/. context/client/ +COPY src/device/__init__.py device/__init__.py +COPY src/device/client/. device/client/ +COPY src/service/__init__.py service/__init__.py +COPY src/service/client/. service/client/ +COPY src/slice/__init__.py slice/__init__.py +COPY src/slice/client/. slice/client/ COPY src/pathcomp/. 
pathcomp/ # Start the service -- GitLab From b2b035c92d9b8715ec7dbf0852ce91c272214a53 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 07:15:30 +0000 Subject: [PATCH 04/12] Policy component: - Fixed code formatting --- .../src/test/java/eu/teraflow/policy/SerializerTest.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/policy/src/test/java/eu/teraflow/policy/SerializerTest.java b/src/policy/src/test/java/eu/teraflow/policy/SerializerTest.java index 641026461..b0fb90864 100644 --- a/src/policy/src/test/java/eu/teraflow/policy/SerializerTest.java +++ b/src/policy/src/test/java/eu/teraflow/policy/SerializerTest.java @@ -3601,7 +3601,8 @@ class SerializerTest { ContextOuterClass.DeviceDriverEnum.DEVICEDRIVER_ONF_TR_352), Arguments.of(DeviceDriverEnum.XR, ContextOuterClass.DeviceDriverEnum.DEVICEDRIVER_XR), Arguments.of( - DeviceDriverEnum.IETF_L2VPN, ContextOuterClass.DeviceDriverEnum.DEVICEDRIVER_IETF_L2VPN), + DeviceDriverEnum.IETF_L2VPN, + ContextOuterClass.DeviceDriverEnum.DEVICEDRIVER_IETF_L2VPN), Arguments.of( DeviceDriverEnum.UNDEFINED, ContextOuterClass.DeviceDriverEnum.DEVICEDRIVER_UNDEFINED)); } -- GitLab From bd3fe6ed75216d8c052655d51c1440960e85b45a Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 07:24:16 +0000 Subject: [PATCH 05/12] PathComp component - Frontend: - Fixed unitary tests --- src/pathcomp/frontend/tests/test_unitary.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/pathcomp/frontend/tests/test_unitary.py b/src/pathcomp/frontend/tests/test_unitary.py index ad03e2626..f4e3cbf0f 100644 --- a/src/pathcomp/frontend/tests/test_unitary.py +++ b/src/pathcomp/frontend/tests/test_unitary.py @@ -14,7 +14,7 @@ import copy, logging, os from common.Constants import DEFAULT_CONTEXT_NAME -from common.proto.context_pb2 import Context, ContextId, DeviceId, Link, LinkId, Topology, Device, TopologyId +from common.proto.context_pb2 import ContextId from common.proto.pathcomp_pb2 import PathCompRequest from common.tools.descriptor.Loader import DescriptorLoader, check_descriptor_load_results, validate_empty_scenario from common.tools.grpc.Tools import grpc_message_to_json @@ -29,9 +29,9 @@ from context.client.ContextClient import ContextClient from pathcomp.frontend.client.PathCompClient import PathCompClient # Scenarios: -#from .Objects_A_B_C import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES -#from .Objects_DC_CSGW_TN import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES -from .Objects_DC_CSGW_TN_OLS import CONTEXTS, DEVICES, LINKS, OBJECTS_PER_TOPOLOGY, SERVICES, TOPOLOGIES +#from .Objects_A_B_C import CONTEXTS, DEVICES, LINKS, SERVICES, TOPOLOGIES +#from .Objects_DC_CSGW_TN import CONTEXTS, DEVICES, LINKS, SERVICES, TOPOLOGIES +from .Objects_DC_CSGW_TN_OLS import CONTEXTS, DEVICES, LINKS, SERVICES, TOPOLOGIES # configure backend environment variables before overwriting them with fixtures to use real backend pathcomp DEFAULT_PATHCOMP_BACKEND_SCHEME = 'http' -- GitLab From 8523a11d230a180af48dbe1cf9c9559d8c278318 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 08:04:05 +0000 Subject: [PATCH 06/12] PathComp component - Frontend: - Fixed KDisjoint Algorithm Reply --- .../algorithms/KDisjointPathAlgorithm.py | 120 ++++++++++-------- 1 file changed, 65 insertions(+), 55 deletions(-) diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index 
a6d39ee36..b15c89602 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -141,7 +141,7 @@ class KDisjointPathAlgorithm(_Algorithm): Path = List[Dict] Path_NoPath = Optional[Path] # None = no path, list = path - self.json_reply : Dict[Tuple[str, str], List[Path_NoPath]] = dict() + service_to_paths : Dict[Tuple[str, str], List[Path_NoPath]] = dict() for num_path in range(self.num_disjoint): algorithm.service_list = list() @@ -189,66 +189,76 @@ class KDisjointPathAlgorithm(_Algorithm): for response in response_list: service_id = response['serviceId'] service_key = (service_id['contextId'], service_id['service_uuid']) - json_reply_service = self.json_reply.setdefault(service_key, list()) + json_reply_service = service_to_paths.setdefault(service_key, list()) no_path_issue = response.get('noPath', {}).get('issue') - if no_path_issue is not None: - json_reply_service.append(None) - continue + if no_path_issue is not None: continue path_endpoints = response['path'][0]['devices'] json_reply_service.append(path_endpoints) algorithm.link_list = self.remove_traversed_links(algorithm.link_list, path_endpoints) + self.json_reply = dict() + response_list = self.json_reply.get('response-list', []) + for service_key,paths in service_to_paths.items(): + response = {'serviceId': { + 'contextId': service_key[0], + 'service_uuid': service_key[1], + }} + response['path'] = paths + if len(paths) < self.num_disjoint: + response['noPath'] = {'issue': 1} + response_list.append(response) + self.logger.debug('self.json_reply = {:s}'.format(str(self.json_reply))) - def get_reply(self) -> PathCompReply: - reply = PathCompReply() - grpc_services : Dict[Tuple[str, str], Service] = {} - grpc_connections : Dict[Tuple[int, str], Connection] = {} - for service_key,paths in self.json_reply.items(): - context_uuid, service_uuid = service_key - - grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, service_uuid) - - for num_path,service_path_ero in enumerate(paths): - self.logger.warning('num_path={:d}'.format(num_path)) - self.logger.warning('service_path_ero={:s}'.format(str(service_path_ero))) - if service_path_ero is None: continue - path_hops = eropath_to_hops(service_path_ero, self.endpoint_to_link_dict) - self.logger.warning('path_hops={:s}'.format(str(path_hops))) - connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid) - self.logger.warning('connections={:s}'.format(str(connections))) - - for connection in connections: - connection_uuid,device_layer,path_hops,_ = connection - - service_key = (context_uuid, connection_uuid) - grpc_service = grpc_services.get(service_key) - if grpc_service is not None: continue - grpc_service = self.add_service_to_reply( - reply, context_uuid, connection_uuid, device_layer=device_layer, path_hops=path_hops) - grpc_services[service_key] = grpc_service - - for connection in connections: - connection_uuid,device_layer,path_hops,dependencies = connection - - service_key = (context_uuid, connection_uuid) - grpc_service = grpc_services.get(service_key) - if grpc_service is None: raise Exception('Service({:s}) not found'.format(str(service_key))) - - connection_uuid = '{:s}:{:d}'.format(connection_uuid, num_path) - grpc_connection = grpc_connections.get(connection_uuid) - if grpc_connection is not None: continue - grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops) - 
grpc_connections[connection_uuid] = grpc_connection - - for sub_service_uuid in dependencies: - sub_service_key = (context_uuid, sub_service_uuid) - grpc_sub_service = grpc_services.get(sub_service_key) - if grpc_sub_service is None: - raise Exception('Service({:s}) not found'.format(str(sub_service_key))) - grpc_sub_service_id = grpc_connection.sub_service_ids.add() - grpc_sub_service_id.CopyFrom(grpc_sub_service.service_id) - - return reply +# def get_reply(self) -> PathCompReply: +# reply = PathCompReply() +# grpc_services : Dict[Tuple[str, str], Service] = {} +# grpc_connections : Dict[Tuple[int, str], Connection] = {} +# for service_key,paths in self.json_reply.items(): +# context_uuid, service_uuid = service_key +# +# grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, service_uuid) +# +# for num_path,service_path_ero in enumerate(paths): +# self.logger.warning('num_path={:d}'.format(num_path)) +# self.logger.warning('service_path_ero={:s}'.format(str(service_path_ero))) +# if service_path_ero is None: continue +# path_hops = eropath_to_hops(service_path_ero, self.endpoint_to_link_dict) +# self.logger.warning('path_hops={:s}'.format(str(path_hops))) +# connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid) +# self.logger.warning('connections={:s}'.format(str(connections))) +# +# for connection in connections: +# connection_uuid,device_layer,path_hops,_ = connection +# +# service_key = (context_uuid, connection_uuid) +# grpc_service = grpc_services.get(service_key) +# if grpc_service is not None: continue +# grpc_service = self.add_service_to_reply( +# reply, context_uuid, connection_uuid, device_layer=device_layer, path_hops=path_hops) +# grpc_services[service_key] = grpc_service +# +# for connection in connections: +# connection_uuid,device_layer,path_hops,dependencies = connection +# +# service_key = (context_uuid, connection_uuid) +# grpc_service = grpc_services.get(service_key) +# if grpc_service is None: raise Exception('Service({:s}) not found'.format(str(service_key))) +# +# connection_uuid = '{:s}:{:d}'.format(connection_uuid, num_path) +# grpc_connection = grpc_connections.get(connection_uuid) +# if grpc_connection is not None: continue +# grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops) +# grpc_connections[connection_uuid] = grpc_connection +# +# for sub_service_uuid in dependencies: +# sub_service_key = (context_uuid, sub_service_uuid) +# grpc_sub_service = grpc_services.get(sub_service_key) +# if grpc_sub_service is None: +# raise Exception('Service({:s}) not found'.format(str(sub_service_key))) +# grpc_sub_service_id = grpc_connection.sub_service_ids.add() +# grpc_sub_service_id.CopyFrom(grpc_sub_service.service_id) +# +# return reply -- GitLab From b0bbbacdb3519d61242e90a34bbf3ea1ef0b5305 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 10:44:05 +0000 Subject: [PATCH 07/12] Common - Tools - Descriptors: - Added dummy unload mode --- src/common/tools/descriptor/Loader.py | 49 ++++++++++++++++++++++++--- 1 file changed, 44 insertions(+), 5 deletions(-) diff --git a/src/common/tools/descriptor/Loader.py b/src/common/tools/descriptor/Loader.py index 0e1d8c737..1e238510c 100644 --- a/src/common/tools/descriptor/Loader.py +++ b/src/common/tools/descriptor/Loader.py @@ -222,13 +222,13 @@ class DescriptorLoader: self.__topologies_add = get_descriptors_add_topologies(self.__topologies) if self.__dummy_mode: - self._dummy_mode() + 
self._load_dummy_mode() else: - self._normal_mode() + self._load_normal_mode() return self.__results - def _dummy_mode(self) -> None: + def _load_dummy_mode(self) -> None: # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. self.__ctx_cli.connect() self._process_descr('context', 'add', self.__ctx_cli.SetContext, Context, self.__contexts_add ) @@ -242,7 +242,7 @@ class DescriptorLoader: self._process_descr('topology', 'update', self.__ctx_cli.SetTopology, Topology, self.__topologies ) #self.__ctx_cli.close() - def _normal_mode(self) -> None: + def _load_normal_mode(self) -> None: # Normal mode: follows the automated workflows in the different components assert len(self.__connections) == 0, 'in normal mode, connections should not be set' @@ -321,7 +321,35 @@ class DescriptorLoader: response = self.__ctx_cli.ListSlices(ContextId(**json_context_id(context_uuid))) assert len(response.slices) == num_slices - def unload(self) -> None: + def _unload_dummy_mode(self) -> None: + # Dummy Mode: used to pre-load databases (WebUI debugging purposes) with no smart or automated tasks. + self.__ctx_cli.connect() + + for _, slice_list in self.slices.items(): + for slice_ in slice_list: + self.__ctx_cli.RemoveSlice(SliceId(**slice_['slice_id'])) + + for _, service_list in self.services.items(): + for service in service_list: + self.__ctx_cli.RemoveService(ServiceId(**service['service_id'])) + + for link in self.links: + self.__ctx_cli.RemoveLink(LinkId(**link['link_id'])) + + for device in self.devices: + self.__ctx_cli.RemoveDevice(DeviceId(**device['device_id'])) + + for _, topology_list in self.topologies.items(): + for topology in topology_list: + self.__ctx_cli.RemoveTopology(TopologyId(**topology['topology_id'])) + + for context in self.contexts: + self.__ctx_cli.RemoveContext(ContextId(**context['context_id'])) + + #self.__ctx_cli.close() + + def _unload_normal_mode(self) -> None: + # Normal mode: follows the automated workflows in the different components self.__ctx_cli.connect() self.__dev_cli.connect() self.__svc_cli.connect() @@ -348,6 +376,17 @@ class DescriptorLoader: for context in self.contexts: self.__ctx_cli.RemoveContext(ContextId(**context['context_id'])) + #self.__ctx_cli.close() + #self.__dev_cli.close() + #self.__svc_cli.close() + #self.__slc_cli.close() + + def unload(self) -> None: + if self.__dummy_mode: + self._unload_dummy_mode() + else: + self._unload_normal_mode() + def compose_notifications(results : TypeResults) -> TypeNotificationList: notifications = [] for entity_name, action_name, num_ok, error_list in results: -- GitLab From 333d402d5f9f125f03b59e8fa25bb5f552b5ad7a Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 10:44:47 +0000 Subject: [PATCH 08/12] PathComp component - Frontend: - Corrected get_link_from_endpoint in KDisjointPath algorithm --- .../frontend/service/algorithms/KDisjointPathAlgorithm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index b15c89602..03d9295be 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -19,7 +19,7 @@ from common.proto.pathcomp_pb2 import Algorithm_KDisjointPath, Algorithm_KShorte from common.tools.grpc.Tools import grpc_message_to_json_string from pathcomp.frontend.service.algorithms.tools.ComputeSubServices 
import convert_explicit_path_hops_to_connections from pathcomp.frontend.service.algorithms.tools.EroPathToHops import eropath_to_hops -from ._Algorithm import _Algorithm +from ._Algorithm import _Algorithm, SRC_END from .KShortestPathAlgorithm import KShortestPathAlgorithm Service_Id = Tuple[str, str] # (context_uuid, service_uuid) @@ -100,7 +100,7 @@ class KDisjointPathAlgorithm(_Algorithm): def get_link_from_endpoint(self, endpoint : Dict) -> Tuple[Dict, Link]: device_uuid = endpoint['device_id'] endpoint_uuid = endpoint['endpoint_uuid'] - item = self.endpoint_to_link_dict.get((device_uuid, endpoint_uuid)) + item = self.endpoint_to_link_dict.get((device_uuid, endpoint_uuid, SRC_END)) if item is None: MSG = 'Link for Endpoint({:s}, {:s}) not found' self.logger.warning(MSG.format(device_uuid, endpoint_uuid)) -- GitLab From a397783df2f11533c2ed1505aa23c0108e35dae0 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Sun, 26 Mar 2023 11:51:52 +0000 Subject: [PATCH 09/12] PathComp component - Frontend: - Activated debug in log to check CI/CD pipeline --- src/pathcomp/.gitlab-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pathcomp/.gitlab-ci.yml b/src/pathcomp/.gitlab-ci.yml index 20ec4e728..289dc6093 100644 --- a/src/pathcomp/.gitlab-ci.yml +++ b/src/pathcomp/.gitlab-ci.yml @@ -131,7 +131,7 @@ unit_test pathcomp-frontend: - docker logs ${IMAGE_NAME}-backend - > docker exec -i ${IMAGE_NAME}-frontend bash -c - "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/frontend/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}-frontend_report.xml" + "coverage run -m pytest --log-level=DEBUG --verbose $IMAGE_NAME/frontend/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}-frontend_report.xml" - docker exec -i ${IMAGE_NAME}-frontend bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' after_script: -- GitLab From 1e1de1fcc4eb04adba7fdd01d7ecd79c18685ba1 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 09:40:57 +0000 Subject: [PATCH 10/12] PathComp component - Frontend: - Corrected KDisjointPath response generation --- .../frontend/service/algorithms/KDisjointPathAlgorithm.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index 03d9295be..eac91501a 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -199,7 +199,7 @@ class KDisjointPathAlgorithm(_Algorithm): algorithm.link_list = self.remove_traversed_links(algorithm.link_list, path_endpoints) self.json_reply = dict() - response_list = self.json_reply.get('response-list', []) + response_list = self.json_reply.setdefault('response-list', []) for service_key,paths in service_to_paths.items(): response = {'serviceId': { 'contextId': service_key[0], -- GitLab From a00c756c2e43da93bb20f3dd53f4e2bbf6148f98 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 11:10:57 +0000 Subject: [PATCH 11/12] PathComp component - Frontend: - Corrected KDisjointPath response generation --- .../frontend/service/algorithms/KDisjointPathAlgorithm.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index eac91501a..eda8ae5c5 100644 --- 
a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -194,9 +194,9 @@ class KDisjointPathAlgorithm(_Algorithm): no_path_issue = response.get('noPath', {}).get('issue') if no_path_issue is not None: continue - path_endpoints = response['path'][0]['devices'] + path_endpoints = response['path'][0] json_reply_service.append(path_endpoints) - algorithm.link_list = self.remove_traversed_links(algorithm.link_list, path_endpoints) + algorithm.link_list = self.remove_traversed_links(algorithm.link_list, path_endpoints['devices']) self.json_reply = dict() response_list = self.json_reply.setdefault('response-list', []) -- GitLab From 8766c63b6c8b76c97b20fb7ffdeea99af9388833 Mon Sep 17 00:00:00 2001 From: gifrerenom Date: Mon, 27 Mar 2023 16:30:22 +0000 Subject: [PATCH 12/12] PathComp component - Frontend: - Pre-merge code polishing - Restored default log level --- src/pathcomp/.gitlab-ci.yml | 2 +- .../algorithms/KDisjointPathAlgorithm.py | 57 +------------------ 2 files changed, 3 insertions(+), 56 deletions(-) diff --git a/src/pathcomp/.gitlab-ci.yml b/src/pathcomp/.gitlab-ci.yml index 289dc6093..20ec4e728 100644 --- a/src/pathcomp/.gitlab-ci.yml +++ b/src/pathcomp/.gitlab-ci.yml @@ -131,7 +131,7 @@ unit_test pathcomp-frontend: - docker logs ${IMAGE_NAME}-backend - > docker exec -i ${IMAGE_NAME}-frontend bash -c - "coverage run -m pytest --log-level=DEBUG --verbose $IMAGE_NAME/frontend/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}-frontend_report.xml" + "coverage run -m pytest --log-level=INFO --verbose $IMAGE_NAME/frontend/tests/test_unitary.py --junitxml=/opt/results/${IMAGE_NAME}-frontend_report.xml" - docker exec -i ${IMAGE_NAME}-frontend bash -c "coverage report --include='${IMAGE_NAME}/*' --show-missing" coverage: '/TOTAL\s+\d+\s+\d+\s+(\d+%)/' after_script: diff --git a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py index eda8ae5c5..144246620 100644 --- a/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py +++ b/src/pathcomp/frontend/service/algorithms/KDisjointPathAlgorithm.py @@ -14,11 +14,9 @@ import operator from typing import Dict, List, Optional, Set, Tuple -from common.proto.context_pb2 import Connection, Link, Service -from common.proto.pathcomp_pb2 import Algorithm_KDisjointPath, Algorithm_KShortestPath, PathCompReply, PathCompRequest +from common.proto.context_pb2 import Link +from common.proto.pathcomp_pb2 import Algorithm_KDisjointPath, Algorithm_KShortestPath, PathCompRequest from common.tools.grpc.Tools import grpc_message_to_json_string -from pathcomp.frontend.service.algorithms.tools.ComputeSubServices import convert_explicit_path_hops_to_connections -from pathcomp.frontend.service.algorithms.tools.EroPathToHops import eropath_to_hops from ._Algorithm import _Algorithm, SRC_END from .KShortestPathAlgorithm import KShortestPathAlgorithm @@ -211,54 +209,3 @@ class KDisjointPathAlgorithm(_Algorithm): response_list.append(response) self.logger.debug('self.json_reply = {:s}'.format(str(self.json_reply))) - -# def get_reply(self) -> PathCompReply: -# reply = PathCompReply() -# grpc_services : Dict[Tuple[str, str], Service] = {} -# grpc_connections : Dict[Tuple[int, str], Connection] = {} -# for service_key,paths in self.json_reply.items(): -# context_uuid, service_uuid = service_key -# -# grpc_services[service_key] = self.add_service_to_reply(reply, context_uuid, 
service_uuid) -# -# for num_path,service_path_ero in enumerate(paths): -# self.logger.warning('num_path={:d}'.format(num_path)) -# self.logger.warning('service_path_ero={:s}'.format(str(service_path_ero))) -# if service_path_ero is None: continue -# path_hops = eropath_to_hops(service_path_ero, self.endpoint_to_link_dict) -# self.logger.warning('path_hops={:s}'.format(str(path_hops))) -# connections = convert_explicit_path_hops_to_connections(path_hops, self.device_dict, service_uuid) -# self.logger.warning('connections={:s}'.format(str(connections))) -# -# for connection in connections: -# connection_uuid,device_layer,path_hops,_ = connection -# -# service_key = (context_uuid, connection_uuid) -# grpc_service = grpc_services.get(service_key) -# if grpc_service is not None: continue -# grpc_service = self.add_service_to_reply( -# reply, context_uuid, connection_uuid, device_layer=device_layer, path_hops=path_hops) -# grpc_services[service_key] = grpc_service -# -# for connection in connections: -# connection_uuid,device_layer,path_hops,dependencies = connection -# -# service_key = (context_uuid, connection_uuid) -# grpc_service = grpc_services.get(service_key) -# if grpc_service is None: raise Exception('Service({:s}) not found'.format(str(service_key))) -# -# connection_uuid = '{:s}:{:d}'.format(connection_uuid, num_path) -# grpc_connection = grpc_connections.get(connection_uuid) -# if grpc_connection is not None: continue -# grpc_connection = self.add_connection_to_reply(reply, connection_uuid, grpc_service, path_hops) -# grpc_connections[connection_uuid] = grpc_connection -# -# for sub_service_uuid in dependencies: -# sub_service_key = (context_uuid, sub_service_uuid) -# grpc_sub_service = grpc_services.get(sub_service_key) -# if grpc_sub_service is None: -# raise Exception('Service({:s}) not found'.format(str(sub_service_key))) -# grpc_sub_service_id = grpc_connection.sub_service_ids.add() -# grpc_sub_service_id.CopyFrom(grpc_sub_service.service_id) -# -# return reply -- GitLab
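
Taken together, patch 02 (test_unitary.py) and patch 07 (Loader.py) replace the hand-rolled SetContext/SetTopology/SetDevice/SetLink setup and the matching Remove* teardown with a DescriptorLoader running in dummy mode, i.e. pre-loading the Context database directly with no automated per-component workflows. The sketch below condenses that lifecycle; it reuses only names that appear in the diffs above, and the direct ContextClient() construction is illustrative: in the real tests the client is injected by the pytest fixture imported from PrepareTestScenario.

    # Condensed sketch of the scenario lifecycle introduced by patches 02 and 07 (illustrative).
    from common.Constants import DEFAULT_CONTEXT_NAME
    from common.proto.context_pb2 import ContextId
    from common.tools.descriptor.Loader import (
        DescriptorLoader, check_descriptor_load_results, validate_empty_scenario)
    from common.tools.object_factory.Context import json_context_id
    from context.client.ContextClient import ContextClient

    # CONTEXTS, TOPOLOGIES, DEVICES, LINKS come from one of the Objects_*.py scenario files.
    ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_NAME))
    DESCRIPTORS = {
        'dummy_mode': True,   # pre-load Context directly, no per-component workflows
        'contexts'  : CONTEXTS,
        'topologies': TOPOLOGIES,
        'devices'   : DEVICES,
        'links'     : LINKS,
    }

    context_client = ContextClient()            # fixture-provided in the real tests
    validate_empty_scenario(context_client)     # nothing should pre-exist

    descriptor_loader = DescriptorLoader(descriptors=DESCRIPTORS, context_client=context_client)
    results = descriptor_loader.process()       # dummy_mode=True -> _load_dummy_mode()
    check_descriptor_load_results(results, descriptor_loader)
    descriptor_loader.validate()

    # no services/slices expected before the path computation tests run
    response = context_client.GetContext(ADMIN_CONTEXT_ID)
    assert len(response.service_ids) == 0 and len(response.slice_ids) == 0

    # ... issue PathComp requests against the loaded topology ...

    descriptor_loader.unload()                  # dummy_mode=True -> _unload_dummy_mode()
    validate_empty_scenario(context_client)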