diff --git a/parse_csv.py b/parse_csv.py
new file mode 100644
index 0000000000000000000000000000000000000000..fc21c6a127eed7ce848549f56ea4c9a4523b5779
--- /dev/null
+++ b/parse_csv.py
@@ -0,0 +1,49 @@
+import calendar, pandas
+from datetime import datetime, timezone
+
+def time_datetime_to_int(dt_time : datetime) -> int:  # datetime -> whole epoch seconds; timegm reads the timetuple as UTC, so microseconds are truncated and any non-UTC tzinfo is ignored — TODO confirm callers always pass UTC
+    return int(calendar.timegm(dt_time.timetuple()))
+
+def time_datetime_to_float(dt_time : datetime) -> float:  # datetime -> fractional epoch seconds (whole seconds plus the microsecond remainder)
+    return time_datetime_to_int(dt_time) + (dt_time.microsecond / 1.e6)
+
+def time_utc_now_to_datetime() -> datetime:  # current wall-clock time as a timezone-aware UTC datetime
+    return datetime.now(tz=timezone.utc)
+
+def time_utc_now_to_float() -> float:  # current UTC time as fractional epoch seconds
+    return time_datetime_to_float(time_utc_now_to_datetime())
+
+def read_csv(csv_file : str) -> pandas.DataFrame:  # load one of the two known datasets, normalize column names, and re-base timestamps so the newest sample is 'now'
+    df = pandas.read_csv(csv_file)
+
+    if 'dataset.csv' in csv_file:  # substring match on the path selects the per-dataset normalization scheme
+        df.rename(columns={'linkid': 'link_id', 'ds': 'timestamp', 'y': 'used_capacity_gbps'}, inplace=True)
+        df[['source', 'destination']] = df['link_id'].str.split('_', expand=True)  # assumes ids look like 'SRC_DST' with exactly one underscore — TODO confirm
+    elif 'dataset2.csv' in csv_file:
+        df.drop(columns=['Unnamed: 0'], inplace=True)  # drop the stale index column written by a previous to_csv()
+        df.rename(columns={
+            'target': 'destination', 'id': 'link_id', 'ds': 'timestamp', 'demandValue': 'used_capacity_gbps'
+        }, inplace=True)
+
+    df['timestamp'] = pandas.to_datetime(df['timestamp'])  # parse 'ds' strings into Timestamps (presumably naive UTC — verify source data)
+    max_timestamp = time_datetime_to_int(df['timestamp'].max())
+    now_timestamp = time_datetime_to_int(datetime.now(tz=timezone.utc))
+    df['timestamp'] = df['timestamp'] + pandas.offsets.Second(now_timestamp - max_timestamp)  # shift the whole series so the latest sample lands at the current time
+    df.sort_values('timestamp', ascending=True, inplace=True)
+    return df
+
+CSV_FILE = 'data/forecaster_data/dataset.csv'  # dataset selector: swap with the commented line below to parse the second dataset
+#CSV_FILE = 'data/forecaster_data/dataset2.csv'
+
+def main():
+    df = read_csv(CSV_FILE)
+    #print(df)
+
+    #print(df.groupby(['source', 'destination']).count())
+
+    links = df.groupby(['source', 'destination']).all()
+    for link in links:
+        print(link)
+
+if __name__ == '__main__':
+    main()
diff --git a/src/forecaster/service/TODO.txt b/src/forecaster/service/TODO.txt
index eb3cf3652e62c383a8f9c1e440d4ba1267740940..95d3fba03084bd1104d49abafc8eb426af5d8c35 100644
--- a/src/forecaster/service/TODO.txt
+++ b/src/forecaster/service/TODO.txt
@@ -1,29 +1,10 @@
-Pseudocode for RPC method `ComputeTopologyForecast`:
-```python
-  # Setting to configure the ratio between requested forecast and amount of historical data to be used for the forecast.
-  # E.g., if forecast window is 1 week, compute forecast based on 10 weeks of historical data.
-  FORECAST_TO_HISTORY_RATIO = 10
+Test case:
+- instantiate the Context and Monitoring components
+- populate the topology
+- populate the Monitoring time series with link utilization samples
+- run the forecast test for a single link
+- run the forecast test for the whole topology
 
-  history_window_seconds = FORECAST_TO_HISTORY_RATIO * request.forecast_window_seconds
-
-  forecast_reply = ForecastTopologyCapacityReply()
-
-  topology = context_client.GetTopology(topology_id)
-  for link_id in topology.link_ids:
-    link = context_client.GetLink(link_id)
-
-    used_capacity_history_gbps = monitoring_client.GetKPIValue(link_id, KPI.LinkUsedCapacity, window=history_window_seconds)
-    forecast_used_capacity_gbps = compute_forecast(used_capacity_history_gbps, forecast_window_seconds)
-
-    forecast_reply.link_capacities.append(ForecastLinkCapacityReply(
-      link_id=link_id,
-      total_capacity_gbps=link.total_capacity_gbps,
-      current_used_capacity_gbps=link.used_capacity_gbps,
-      forecast_used_capacity_gbps=forecast_used_capacity_gbps
-    ))
-
-  return forecast_reply
-```
 
 ## PathComp Impact
 After retrieving the topology, if the service has a duration constraint configured, the PathComp component should interrogate the Forecaster and request a topology forecast according to the requested duration of the service. The computed link capacity forecast should be used as link capacity in path computations.