Skip to content
Snippets Groups Projects
Commit e12677e6 authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

Merge branch 'fix/release-2' into 'develop'

Fixes for OFC'22 and ECOC'22 tests

See merge request !48
parents c3ede397 5f5099b1
No related branches found
No related tags found
2 merge requests!54Release 2.0.0,!48Fixes for OFC'22 and ECOC'22 tests
Showing
with 242 additions and 39 deletions
...@@ -75,19 +75,19 @@ export NATS_REDEPLOY="" ...@@ -75,19 +75,19 @@ export NATS_REDEPLOY=""
# ----- QuestDB ---------------------------------------------------------------- # ----- QuestDB ----------------------------------------------------------------
# If not already set, set the namespace where QuestDB will be deployed. # If not already set, set the namespace where QuestDB will be deployed.
export QDB_NAMESPACE=${QDB_NAMESPACE:-"qdb"} export QDB_NAMESPACE="qdb"
# If not already set, set the database username to be used by Monitoring. # If not already set, set the database username to be used by Monitoring.
export QDB_USERNAME=${QDB_USERNAME:-"admin"} export QDB_USERNAME="admin"
# If not already set, set the database user's password to be used by Monitoring. # If not already set, set the database user's password to be used by Monitoring.
export QDB_PASSWORD=${QDB_PASSWORD:-"quest"} export QDB_PASSWORD="quest"
# If not already set, set the table name to be used by Monitoring. # If not already set, set the table name to be used by Monitoring.
export QDB_TABLE=${QDB_TABLE:-"tfs_monitoring"} export QDB_TABLE="tfs_monitoring"
## If not already set, disable flag for dropping table if exists. ## If not already set, disable flag for dropping table if exists.
#export QDB_DROP_TABLE_IF_EXISTS=${QDB_DROP_TABLE_IF_EXISTS:-""} #export QDB_DROP_TABLE_IF_EXISTS=""
# If not already set, disable flag for re-deploying QuestDB from scratch. # If not already set, disable flag for re-deploying QuestDB from scratch.
export QDB_REDEPLOY=${QDB_REDEPLOY:-""} export QDB_REDEPLOY=""
...@@ -63,7 +63,9 @@ class KDisjointPathAlgorithm(_Algorithm): ...@@ -63,7 +63,9 @@ class KDisjointPathAlgorithm(_Algorithm):
if constraint.WhichOneof('constraint') == 'endpoint_location': if constraint.WhichOneof('constraint') == 'endpoint_location':
endpoint_id = constraint.endpoint_location.endpoint_id endpoint_id = constraint.endpoint_location.endpoint_id
device_uuid = endpoint_id.device_id.device_uuid.uuid device_uuid = endpoint_id.device_id.device_uuid.uuid
device_uuid = self.device_name_mapping.get(device_uuid, device_uuid)
endpoint_uuid = endpoint_id.endpoint_uuid.uuid endpoint_uuid = endpoint_id.endpoint_uuid.uuid
endpoint_uuid = self.endpoint_name_mapping.get((device_uuid, endpoint_uuid), endpoint_uuid)
location_kind = constraint.endpoint_location.location.WhichOneof('location') location_kind = constraint.endpoint_location.location.WhichOneof('location')
if location_kind != 'region': if location_kind != 'region':
MSG = 'Unsupported LocationType({:s}) in Constraint({:s})' MSG = 'Unsupported LocationType({:s}) in Constraint({:s})'
...@@ -74,7 +76,9 @@ class KDisjointPathAlgorithm(_Algorithm): ...@@ -74,7 +76,9 @@ class KDisjointPathAlgorithm(_Algorithm):
if constraint.WhichOneof('constraint') == 'endpoint_priority': if constraint.WhichOneof('constraint') == 'endpoint_priority':
endpoint_id = constraint.endpoint_priority.endpoint_id endpoint_id = constraint.endpoint_priority.endpoint_id
device_uuid = endpoint_id.device_id.device_uuid.uuid device_uuid = endpoint_id.device_id.device_uuid.uuid
device_uuid = self.device_name_mapping.get(device_uuid, device_uuid)
endpoint_uuid = endpoint_id.endpoint_uuid.uuid endpoint_uuid = endpoint_id.endpoint_uuid.uuid
endpoint_uuid = self.endpoint_name_mapping.get((device_uuid, endpoint_uuid), endpoint_uuid)
priority = constraint.endpoint_priority.priority priority = constraint.endpoint_priority.priority
endpoints.setdefault((device_uuid, endpoint_uuid), dict())['priority'] = priority endpoints.setdefault((device_uuid, endpoint_uuid), dict())['priority'] = priority
...@@ -116,8 +120,10 @@ class KDisjointPathAlgorithm(_Algorithm): ...@@ -116,8 +120,10 @@ class KDisjointPathAlgorithm(_Algorithm):
algorithm = KShortestPathAlgorithm(Algorithm_KShortestPath(k_inspection=0, k_return=1)) algorithm = KShortestPathAlgorithm(Algorithm_KShortestPath(k_inspection=0, k_return=1))
algorithm.sync_paths = True algorithm.sync_paths = True
algorithm.device_list = self.device_list algorithm.device_list = self.device_list
algorithm.device_name_mapping = self.device_name_mapping
algorithm.device_dict = self.device_dict algorithm.device_dict = self.device_dict
algorithm.endpoint_dict = self.endpoint_dict algorithm.endpoint_dict = self.endpoint_dict
algorithm.endpoint_name_mapping = self.endpoint_name_mapping
algorithm.link_list = self.link_list algorithm.link_list = self.link_list
algorithm.link_dict = self.link_dict algorithm.link_dict = self.link_dict
algorithm.endpoint_to_link_dict = self.endpoint_to_link_dict algorithm.endpoint_to_link_dict = self.endpoint_to_link_dict
...@@ -139,6 +145,7 @@ class KDisjointPathAlgorithm(_Algorithm): ...@@ -139,6 +145,7 @@ class KDisjointPathAlgorithm(_Algorithm):
_service.service_id.context_id.context_uuid.uuid = service_key[0] _service.service_id.context_id.context_uuid.uuid = service_key[0]
_service.service_id.service_uuid.uuid = service_key[1] _service.service_id.service_uuid.uuid = service_key[1]
_service.service_type = service_type _service.service_type = service_type
for constraint_type, constraint_value in constraints.items(): for constraint_type, constraint_value in constraints.items():
constraint = _service.service_constraints.add() constraint = _service.service_constraints.add()
constraint.custom.constraint_type = constraint_type constraint.custom.constraint_type = constraint_type
......
...@@ -40,7 +40,9 @@ class _Algorithm: ...@@ -40,7 +40,9 @@ class _Algorithm:
self.device_list : List[Dict] = list() self.device_list : List[Dict] = list()
self.device_dict : Dict[str, Tuple[Dict, Device]] = dict() self.device_dict : Dict[str, Tuple[Dict, Device]] = dict()
self.device_name_mapping : Dict[str, str] = dict()
self.endpoint_dict : Dict[str, Dict[str, Tuple[Dict, EndPointId]]] = dict() self.endpoint_dict : Dict[str, Dict[str, Tuple[Dict, EndPointId]]] = dict()
self.endpoint_name_mapping : Dict[Tuple[str, str], str] = dict()
self.link_list : List[Dict] = list() self.link_list : List[Dict] = list()
self.link_dict : Dict[str, Tuple[Dict, Link]] = dict() self.link_dict : Dict[str, Tuple[Dict, Link]] = dict()
self.endpoint_to_link_dict : Dict[Tuple[str, str], Tuple[Dict, Link]] = dict() self.endpoint_to_link_dict : Dict[Tuple[str, str], Tuple[Dict, Link]] = dict()
...@@ -56,12 +58,21 @@ class _Algorithm: ...@@ -56,12 +58,21 @@ class _Algorithm:
device_uuid = json_device['device_Id'] device_uuid = json_device['device_Id']
self.device_dict[device_uuid] = (json_device, grpc_device) self.device_dict[device_uuid] = (json_device, grpc_device)
_device_uuid = grpc_device.device_id.device_uuid.uuid
_device_name = grpc_device.name
self.device_name_mapping[_device_name] = _device_uuid
device_endpoint_dict : Dict[str, Tuple[Dict, EndPointId]] = dict() device_endpoint_dict : Dict[str, Tuple[Dict, EndPointId]] = dict()
for json_endpoint,grpc_endpoint in zip(json_device['device_endpoints'], grpc_device.device_endpoints): for json_endpoint,grpc_endpoint in zip(json_device['device_endpoints'], grpc_device.device_endpoints):
endpoint_uuid = json_endpoint['endpoint_id']['endpoint_uuid'] endpoint_uuid = json_endpoint['endpoint_id']['endpoint_uuid']
endpoint_tuple = (json_endpoint['endpoint_id'], grpc_endpoint.endpoint_id) endpoint_tuple = (json_endpoint['endpoint_id'], grpc_endpoint.endpoint_id)
device_endpoint_dict[endpoint_uuid] = endpoint_tuple device_endpoint_dict[endpoint_uuid] = endpoint_tuple
_endpoint_uuid = grpc_endpoint.endpoint_id.endpoint_uuid.uuid
_endpoint_name = grpc_endpoint.name
self.endpoint_name_mapping[(_device_uuid, _endpoint_name)] = _endpoint_uuid
self.endpoint_name_mapping[(_device_name, _endpoint_name)] = _endpoint_uuid
self.endpoint_dict[device_uuid] = device_endpoint_dict self.endpoint_dict[device_uuid] = device_endpoint_dict
def add_links(self, grpc_links : Union[List[Link], LinkList]) -> None: def add_links(self, grpc_links : Union[List[Link], LinkList]) -> None:
......
# Set the URL of your local Docker registry where the images will be uploaded to. #!/bin/bash
export TFS_REGISTRY_IMAGE="http://localhost:32000/tfs/" # Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----- TeraFlowSDN ------------------------------------------------------------
# Set the URL of the internal MicroK8s Docker registry where the images will be uploaded to.
export TFS_REGISTRY_IMAGES="http://localhost:32000/tfs/"
# Set the list of components, separated by spaces, you want to build images for, and deploy. # Set the list of components, separated by spaces, you want to build images for, and deploy.
#export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui load_generator"
export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui" export TFS_COMPONENTS="context device automation monitoring pathcomp service slice compute webui"
# Set the tag you want to use for your images. # Set the tag you want to use for your images.
export TFS_IMAGE_TAG="dev" export TFS_IMAGE_TAG="dev"
# Set the name of the Kubernetes namespace to deploy to. # Set the name of the Kubernetes namespace to deploy TFS to.
export TFS_K8S_NAMESPACE="tfs" export TFS_K8S_NAMESPACE="tfs"
# Set additional manifest files to be applied after the deployment # Set additional manifest files to be applied after the deployment
export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml" export TFS_EXTRA_MANIFESTS="manifests/nginx_ingress_http.yaml"
# Set the neew Grafana admin password # Set the new Grafana admin password
export TFS_GRAFANA_PASSWORD="admin123+" export TFS_GRAFANA_PASSWORD="admin123+"
# Disable skip-build flag to rebuild the Docker images.
export TFS_SKIP_BUILD=""
# ----- CockroachDB ------------------------------------------------------------
# Set the namespace where CockroackDB will be deployed.
export CRDB_NAMESPACE="crdb"
# Set the database username to be used by Context.
export CRDB_USERNAME="tfs"
# Set the database user's password to be used by Context.
export CRDB_PASSWORD="tfs123"
# Set the database name to be used by Context.
export CRDB_DATABASE="tfs"
# Set CockroachDB installation mode to 'single'. This option is convenient for development and testing.
# See ./deploy/all.sh or ./deploy/crdb.sh for additional details
export CRDB_DEPLOY_MODE="single"
# Disable flag for dropping database, if exists.
export CRDB_DROP_DATABASE_IF_EXISTS=""
# Disable flag for re-deploying CockroachDB from scratch.
export CRDB_REDEPLOY=""
# ----- NATS -------------------------------------------------------------------
# Set the namespace where NATS will be deployed.
export NATS_NAMESPACE="nats"
# Disable flag for re-deploying NATS from scratch.
export NATS_REDEPLOY=""
# ----- QuestDB ----------------------------------------------------------------
# If not already set, set the namespace where QuestDB will be deployed.
export QDB_NAMESPACE="qdb"
# If not already set, set the database username to be used by Monitoring.
export QDB_USERNAME="admin"
# If not already set, set the database user's password to be used by Monitoring.
export QDB_PASSWORD="quest"
# If not already set, set the table name to be used by Monitoring.
export QDB_TABLE="tfs_monitoring"
## If not already set, disable flag for dropping table if exists.
#export QDB_DROP_TABLE_IF_EXISTS=""
# If not already set, disable flag for re-deploying QuestDB from scratch.
export QDB_REDEPLOY=""
{ {
"contexts": [ "contexts": [
{ {"context_id": {"context_uuid": {"uuid": "admin"}}}
"context_id": {"context_uuid": {"uuid": "admin"}},
"topology_ids": [], "service_ids": []
}
], ],
"topologies": [ "topologies": [
{ {"topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}}}
"topology_id": {"context_id": {"context_uuid": {"uuid": "admin"}}, "topology_uuid": {"uuid": "admin"}},
"device_ids": [], "link_ids": []
}
], ],
"devices": [ "devices": [
{ {
...@@ -17,7 +11,11 @@ ...@@ -17,7 +11,11 @@
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [], "type": "copper", "uuid": "eth1"},
{"sample_types": [], "type": "copper", "uuid": "eth2"},
{"sample_types": [], "type": "copper", "uuid": "int"}
]}}}
]} ]}
}, },
{ {
...@@ -25,39 +23,55 @@ ...@@ -25,39 +23,55 @@
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"eth2\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"int\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [], "type": "copper", "uuid": "eth1"},
{"sample_types": [], "type": "copper", "uuid": "eth2"},
{"sample_types": [], "type": "copper", "uuid": "int"}
]}}}
]} ]}
}, },
{ {
"device_id": {"device_uuid": {"uuid": "CS1-GW1"}}, "device_type": "packet-router", "device_drivers": [1], "device_id": {"device_uuid": {"uuid": "CS1-GW1"}}, "device_type": "emu-packet-router", "device_drivers": [1],
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"},
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}
]}}}
]} ]}
}, },
{ {
"device_id": {"device_uuid": {"uuid": "CS1-GW2"}}, "device_type": "packet-router", "device_drivers": [1], "device_id": {"device_uuid": {"uuid": "CS1-GW2"}}, "device_type": "emu-packet-router", "device_drivers": [1],
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"},
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}
]}}}
]} ]}
}, },
{ {
"device_id": {"device_uuid": {"uuid": "CS2-GW1"}}, "device_type": "packet-router", "device_drivers": [1], "device_id": {"device_uuid": {"uuid": "CS2-GW1"}}, "device_type": "emu-packet-router", "device_drivers": [1],
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"},
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}
]}}}
]} ]}
}, },
{ {
"device_id": {"device_uuid": {"uuid": "CS2-GW2"}}, "device_type": "packet-router", "device_drivers": [1], "device_id": {"device_uuid": {"uuid": "CS2-GW2"}}, "device_type": "emu-packet-router", "device_drivers": [1],
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"10/1\"}, {\"sample_types\": [], \"type\": \"copper\", \"uuid\": \"1/1\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "10/1"},
{"sample_types": [101, 102, 201, 202], "type": "copper", "uuid": "1/1"}
]}}}
]} ]}
}, },
{ {
...@@ -65,7 +79,12 @@ ...@@ -65,7 +79,12 @@
"device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [ "device_endpoints": [], "device_operational_status": 1, "device_config": {"config_rules": [
{"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}}, {"action": 1, "custom": {"resource_key": "_connect/address", "resource_value": "127.0.0.1"}},
{"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}}, {"action": 1, "custom": {"resource_key": "_connect/port", "resource_value": "0"}},
{"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": "{\"endpoints\": [{\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"aade6001-f00b-5e2f-a357-6a0a9d3de870\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"eb287d83-f05e-53ec-ab5a-adf6bd2b5418\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"0ef74f99-1acc-57bd-ab9d-4b958b06c513\"}, {\"sample_types\": [], \"type\": \"optical\", \"uuid\": \"50296d99-58cc-5ce7-82f5-fc8ee4eec2ec\"}]}"}} {"action": 1, "custom": {"resource_key": "_connect/settings", "resource_value": {"endpoints": [
{"sample_types": [], "type": "optical", "uuid": "aade6001-f00b-5e2f-a357-6a0a9d3de870"},
{"sample_types": [], "type": "optical", "uuid": "eb287d83-f05e-53ec-ab5a-adf6bd2b5418"},
{"sample_types": [], "type": "optical", "uuid": "0ef74f99-1acc-57bd-ab9d-4b958b06c513"},
{"sample_types": [], "type": "optical", "uuid": "50296d99-58cc-5ce7-82f5-fc8ee4eec2ec"}
]}}}
]} ]}
} }
], ],
......
#!/bin/bash #!/bin/bash
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
source ecoc22/deploy_specs.sh source ecoc22/deploy_specs.sh
./deploy.sh ./deploy/all.sh
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
...@@ -14,4 +14,4 @@ ...@@ -14,4 +14,4 @@
# limitations under the License. # limitations under the License.
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
pytest --verbose src/tests/ecoc22/tests/test_functional_bootstrap.py pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_bootstrap.py
...@@ -14,4 +14,4 @@ ...@@ -14,4 +14,4 @@
# limitations under the License. # limitations under the License.
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
pytest --verbose src/tests/ecoc22/tests/test_functional_create_service.py pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_create_service.py
...@@ -14,4 +14,4 @@ ...@@ -14,4 +14,4 @@
# limitations under the License. # limitations under the License.
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
pytest --verbose src/tests/ecoc22/tests/test_functional_delete_service.py pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_delete_service.py
...@@ -14,4 +14,4 @@ ...@@ -14,4 +14,4 @@
# limitations under the License. # limitations under the License.
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
pytest --verbose src/tests/ecoc22/tests/test_functional_cleanup.py pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_cleanup.py
...@@ -13,32 +13,9 @@ ...@@ -13,32 +13,9 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
# Run functional tests
PROJECTDIR=`pwd`
RCFILE=$PROJECTDIR/coverage/.coveragerc
COVERAGEFILE=$PROJECTDIR/coverage/.coverage
# Configure the correct folder on the .coveragerc file
cat $PROJECTDIR/coverage/.coveragerc.template | sed s+~/teraflow/controller+$PROJECTDIR+g > $RCFILE
# Destroy old coverage file
rm -f $COVERAGEFILE
source tfs_runtime_env_vars.sh source tfs_runtime_env_vars.sh
pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_bootstrap.py
# Force a flush of Context database pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_create_service.py
kubectl --namespace $TFS_K8S_NAMESPACE exec -it deployment/contextservice --container redis -- redis-cli FLUSHALL pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_delete_service.py
pytest --verbose --log-level=INFO src/tests/ecoc22/tests/test_functional_cleanup.py
# Run functional tests and analyze code coverage at the same time
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
src/tests/ecoc22/tests/test_functional_bootstrap.py
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
src/tests/ecoc22/tests/test_functional_create_service.py
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
src/tests/ecoc22/tests/test_functional_delete_service.py
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
src/tests/ecoc22/tests/test_functional_cleanup.py
# Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc. # Add here your files containing confidential testbed details such as IP addresses, ports, usernames, passwords, etc.
Credentials.py
...@@ -12,15 +12,17 @@ ...@@ -12,15 +12,17 @@
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import pytest import pytest, logging
from common.Settings import get_setting from common.Settings import get_setting
from compute.tests.mock_osm.MockOSM import MockOSM from tests.tools.mock_osm.Constants import WIM_PASSWORD, WIM_USERNAME
#from .Objects_BigNet import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME from tests.tools.mock_osm.MockOSM import MockOSM
from .Objects_DC_CSGW_TN import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME from .Objects import WIM_MAPPING
#from .Objects_DC_CSGW_TN_OLS import WIM_MAPPING, WIM_PASSWORD, WIM_USERNAME
LOGGER = logging.getLogger(__name__)
@pytest.fixture(scope='session') @pytest.fixture(scope='session')
def osm_wim(): def osm_wim():
wim_url = 'http://{:s}:{:s}'.format( wim_url = 'http://{:s}:{:s}'.format(
get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP'))) get_setting('COMPUTESERVICE_SERVICE_HOST'), str(get_setting('COMPUTESERVICE_SERVICE_PORT_HTTP')))
LOGGER.info('WIM_MAPPING = {:s}'.format(str(WIM_MAPPING)))
return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD) return MockOSM(wim_url, WIM_MAPPING, WIM_USERNAME, WIM_PASSWORD)
# Copyright 2021-2023 H2020 TeraFlow (https://www.teraflow-h2020.eu/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from common.tools.object_factory.Device import json_device_id
from common.tools.object_factory.EndPoint import json_endpoint_id
from tests.tools.mock_osm.Tools import connection_point, wim_mapping
# ----- WIM Service Settings -------------------------------------------------------------------------------------------
# PRI = primary // BKP = backup
# Data Center 1: site identifier, its gateway device, and the primary/backup
# endpoints (eth1/eth2) on that gateway used to attach the WIM service.
SITE_ID_DC1 = 'DC1'
DEV_ID_DC1 = json_device_id('DC1-GW')
EP_ID_DC1_PRI = json_endpoint_id(DEV_ID_DC1, 'eth1')
EP_ID_DC1_BKP = json_endpoint_id(DEV_ID_DC1, 'eth2')
# Core/site gateway routers associated with DC1 (CS1-GW1 serves the primary
# path, CS1-GW2 the backup path — see the wim_mapping() calls below).
DEV_ID_CS1GW1 = json_device_id('CS1-GW1')
DEV_ID_CS1GW2 = json_device_id('CS1-GW2')
# Data Center 2: mirror of the DC1 definitions above.
SITE_ID_DC2 = 'DC2'
DEV_ID_DC2 = json_device_id('DC2-GW')
EP_ID_DC2_PRI = json_endpoint_id(DEV_ID_DC2, 'eth1')
EP_ID_DC2_BKP = json_endpoint_id(DEV_ID_DC2, 'eth2')
DEV_ID_CS2GW1 = json_device_id('CS2-GW1')
DEV_ID_CS2GW2 = json_device_id('CS2-GW2')
# Build (service_endpoint, mapping) pairs for each DC attachment. Lower
# 'priority' value appears to select the preferred path (10 = primary,
# 20 = backup); 'redundant' lists the peer endpoint ('site:device:endpoint')
# that backs this one — NOTE(review): exact semantics of priority/redundant
# are defined by tests.tools.mock_osm.Tools.wim_mapping; confirm there.
WIM_SEP_DC1_PRI, WIM_MAP_DC1_PRI = wim_mapping(SITE_ID_DC1, EP_ID_DC1_PRI, DEV_ID_CS1GW1, priority=10, redundant=['DC1:DC1-GW:eth2'])
WIM_SEP_DC1_BKP, WIM_MAP_DC1_BKP = wim_mapping(SITE_ID_DC1, EP_ID_DC1_BKP, DEV_ID_CS1GW2, priority=20, redundant=['DC1:DC1-GW:eth1'])
WIM_SEP_DC2_PRI, WIM_MAP_DC2_PRI = wim_mapping(SITE_ID_DC2, EP_ID_DC2_PRI, DEV_ID_CS2GW1, priority=10, redundant=['DC2:DC2-GW:eth2'])
WIM_SEP_DC2_BKP, WIM_MAP_DC2_BKP = wim_mapping(SITE_ID_DC2, EP_ID_DC2_BKP, DEV_ID_CS2GW2, priority=20, redundant=['DC2:DC2-GW:eth1'])
# Aggregate mapping consumed by the MockOSM WIM client (all four attachments).
WIM_MAPPING = [
    WIM_MAP_DC1_PRI, WIM_MAP_DC1_BKP,
    WIM_MAP_DC2_PRI, WIM_MAP_DC2_BKP,
]
# Service request parameters: an ELAN service tagged with VLAN 300, attached
# through the two primary connection points (one per data center).
WIM_SRV_VLAN_ID = 300
WIM_SERVICE_TYPE = 'ELAN'
WIM_SERVICE_CONNECTION_POINTS = [
    connection_point(WIM_SEP_DC1_PRI, 'dot1q', WIM_SRV_VLAN_ID),
    connection_point(WIM_SEP_DC2_PRI, 'dot1q', WIM_SRV_VLAN_ID),
]
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment