Skip to content
Snippets Groups Projects
Commit 8d581e12 authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

Forecaster component:

- Corrected unitary tests
- Added info log messages in tests
- Activated live log in test scripts
parent ea3e90c3
No related branches found
No related tags found
2 merge requests!235Release TeraFlowSDN 3.0,!160Resolve "(CTTC) Forecaster component"
......@@ -20,5 +20,6 @@ cd $PROJECTDIR/src
RCFILE=$PROJECTDIR/coverage/.coveragerc
# Run unitary tests and analyze coverage of code at same time
coverage run --rcfile=$RCFILE --append -m pytest --log-level=INFO --verbose \
# helpful pytest flags: --log-level=INFO -o log_cli=true --verbose --maxfail=1 --durations=0
coverage run --rcfile=$RCFILE --append -m pytest --log-level=DEBUG -o log_cli=true --verbose \
forecaster/tests/test_unitary.py
......@@ -12,17 +12,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
import calendar, math, pandas
import calendar, logging, math, pandas
from datetime import datetime, timezone
from typing import Dict
from common.tools.object_factory.Context import json_context
from common.tools.object_factory.Device import (
json_device_emulated_connect_rules, json_device_emulated_packet_router_disabled, json_device_id
)
from common.tools.object_factory.EndPoint import json_endpoint_descriptor, json_endpoint_id
from common.tools.object_factory.EndPoint import json_endpoint, json_endpoint_descriptor, json_endpoint_id
from common.tools.object_factory.Link import json_link
from common.tools.object_factory.Topology import json_topology
LOGGER = logging.getLogger(__name__)
def time_datetime_to_int(dt_time : datetime) -> int:
    """Convert a datetime (its fields interpreted as UTC) into an integer Unix timestamp.

    Uses ``calendar.timegm`` on the datetime's time tuple, so naive datetimes are
    treated as UTC rather than local time.
    """
    utc_struct = dt_time.timetuple()
    epoch_seconds = calendar.timegm(utc_struct)
    return int(epoch_seconds)
......@@ -36,8 +38,13 @@ def time_utc_now_to_float() -> float:
return time_datetime_to_float(time_utc_now_to_datetime())
def read_csv(csv_file : str) -> pandas.DataFrame:
LOGGER.info('Using Data File "{:s}"...'.format(csv_file))
LOGGER.info('Loading...')
df = pandas.read_csv(csv_file)
LOGGER.info(' DONE')
LOGGER.info('Parsing and Adapting columns...')
if 'dataset.csv' in csv_file:
df.rename(columns={'linkid': 'link_id', 'ds': 'timestamp', 'y': 'used_capacity_gbps'}, inplace=True)
df[['source', 'destination']] = df['link_id'].str.split('_', expand=True)
......@@ -46,18 +53,26 @@ def read_csv(csv_file : str) -> pandas.DataFrame:
df.rename(columns={
'target': 'destination', 'id': 'link_id', 'ds': 'timestamp', 'demandValue': 'used_capacity_gbps'
}, inplace=True)
LOGGER.info(' DONE')
LOGGER.info('Updating timestamps...')
df['timestamp'] = pandas.to_datetime(df['timestamp'])
max_timestamp = time_datetime_to_int(df['timestamp'].max())
now_timestamp = time_datetime_to_int(datetime.now(tz=timezone.utc))
df['timestamp'] = df['timestamp'] + pandas.offsets.Second(now_timestamp - max_timestamp)
LOGGER.info(' DONE')
LOGGER.info('Sorting...')
df.sort_values('timestamp', ascending=True, inplace=True)
LOGGER.info(' DONE')
return df
def compose_descriptors(df : pandas.DataFrame) -> Dict:
devices = dict()
links = dict()
LOGGER.info('Discovering Devices and Links...')
#df1.groupby(['A','B']).size().reset_index().rename(columns={0:'count'})
df_links = df[['link_id', 'source', 'destination']].drop_duplicates()
for row in df_links.itertuples(index=False):
......@@ -86,18 +101,21 @@ def compose_descriptors(df : pandas.DataFrame) -> Dict:
'dst_dev': dst_device_uuid, 'dst_port': src_device_uuid,
'total_capacity_gbps': total_capacity_gbps, 'used_capacity_gbps': used_capacity_gbps,
}
LOGGER.info(' Found {:d} devices and {:d} links...'.format(len(devices), len(links)))
LOGGER.info('Composing Descriptors...')
_context = json_context('admin', name='admin')
_topology = json_topology('admin', name='admin', context_id=_context['context_id'])
descriptor = {
'dummy_mode': True, # inject the descriptors directly into the Context component
'contexts': [_context],
'topologies': [_topology],
'devices': [
json_device_emulated_packet_router_disabled(
device_uuid, name=device_uuid, config_rules=json_device_emulated_connect_rules([
json_endpoint_descriptor(endpoint_uuid, 'copper', endpoint_name=endpoint_uuid)
device_uuid, name=device_uuid, endpoints=[
json_endpoint(json_device_id(device_uuid), endpoint_uuid, 'copper')
for endpoint_uuid in device_data['endpoints']
]))
], config_rules=json_device_emulated_connect_rules([]))
for device_uuid,device_data in devices.items()
],
'links': [
......@@ -109,4 +127,5 @@ def compose_descriptors(df : pandas.DataFrame) -> Dict:
for link_uuid,link_data in links.items()
],
}
LOGGER.info(' DONE')
return descriptor
0% — Loading, or reload the page.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment