diff --git a/src/forecaster/service/TODO.txt b/src/forecaster/service/TODO.txt
index 95d3fba03084bd1104d49abafc8eb426af5d8c35..e1c8532026f8e35ccbbe307000e380b22b9d103e 100644
--- a/src/forecaster/service/TODO.txt
+++ b/src/forecaster/service/TODO.txt
@@ -1,3 +1,20 @@
+
+Use a smaller network:
+INFO forecaster.tests.Tools:Tools.py:75 Discovering Devices and Links...
+INFO forecaster.tests.Tools:Tools.py:104 Found 22 devices and 462 links...
+
+ERROR forecaster.service.ForecasterServiceServicerImpl:Decorator.py:233 ForecastTopologyCapacity exception
+Traceback (most recent call last):
+  File "/home/tfs/tfs-ctrl/src/common/method_wrappers/Decorator.py", line 220, in inner_wrapper
+    reply = func(self, request, grpc_context)
+  File "/home/tfs/tfs-ctrl/src/forecaster/service/ForecasterServiceServicerImpl.py", line 94, in ForecastTopologyCapacity
+    link_capacities[link.link_id] = link.attributes
+TypeError: unhashable type: 'LinkId'
+
+
+
+
+
 test case:
 - instantiate Context and Monitoring
 - populate topology
diff --git a/src/forecaster/tests/test_unitary.py b/src/forecaster/tests/test_unitary.py
index 8e2c8821a2f4df25492ca750f9bfdb5f2128c2fd..0be9e5f06dc224667c5f4764ab47e0964d39f41f 100644
--- a/src/forecaster/tests/test_unitary.py
+++ b/src/forecaster/tests/test_unitary.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, pandas, pytest
+import json, logging, pandas, pytest
 from typing import Dict, Tuple
 from common.Constants import DEFAULT_CONTEXT_NAME, DEFAULT_TOPOLOGY_NAME
 from common.proto.context_pb2 import ContextId, TopologyId
@@ -37,11 +37,14 @@ ADMIN_TOPOLOGY_ID = TopologyId(**json_topology_id(DEFAULT_TOPOLOGY_NAME, context
 
 CSV_DATA_FILE = 'forecaster/tests/data/dataset.csv'
 #CSV_DATA_FILE = 'forecaster/tests/data/dataset2.csv'
+DESC_DATS_FILE = 'forecaster/tests/data/descriptor.json'
 
 @pytest.fixture(scope='session')
 def scenario() -> Tuple[pandas.DataFrame, Dict]:
     df = read_csv(CSV_DATA_FILE)
     descriptors = compose_descriptors(df)
+    with open(DESC_DATS_FILE, 'w', encoding='UTF-8') as f:
+        f.write(json.dumps(descriptors))
     yield df, descriptors
 
 def test_prepare_environment(
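
Note on the TypeError recorded in TODO.txt above: protobuf messages such as LinkId are not hashable, so they cannot be used directly as dictionary keys. Below is a minimal sketch of one possible workaround, not the committed fix; it assumes LinkId wraps a Uuid reachable as link_id.link_uuid.uuid, mirroring the other *Id messages imported from common.proto.context_pb2 in the test, and the helper name index_link_capacities is hypothetical.

# Hypothetical sketch, not part of this patch: index link capacities by the
# link's UUID string instead of by the (unhashable) LinkId protobuf message.
from typing import Dict

def index_link_capacities(links) -> Dict:
    link_capacities : Dict = {}
    for link in links:
        # Using link.link_id directly as a key raises "TypeError: unhashable type: 'LinkId'";
        # the nested UUID string is hashable (assumes link_id.link_uuid.uuid exists).
        link_uuid = link.link_id.link_uuid.uuid
        link_capacities[link_uuid] = link.attributes
    return link_capacities

Alternatively, serializing the whole LinkId message to a canonical JSON string would also produce a hashable key, at the cost of longer keys.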