Skip to content
Snippets Groups Projects
Commit 5e23f50f authored by Lluis Gifre Renom's avatar Lluis Gifre Renom
Browse files

Common/tools/grps:

- Added new Event Collection and Dispatching framework
parent 31b46ce4
No related branches found
No related tags found
2 merge requests!294Release TeraFlowSDN 4.0,!282Resolve "(CTTC) Auto-start telemetry collection when a device endpoint is activated"
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# See usage example below
import grpc, logging, queue, threading, time
from typing import Any, Callable, List, Optional
from common.proto.context_pb2 import Empty
from common.tools.grpc.Tools import grpc_message_to_json_string
from context.client.ContextClient import ContextClient
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
class CollectorThread(threading.Thread):
    """Thread that consumes a gRPC event subscription stream and pushes each
    received event into a shared priority queue, keyed by event timestamp.

    The thread keeps (re-)subscribing until `terminate` is set. Transient
    UNAVAILABLE errors trigger a retry after a short sleep; CANCELLED (raised
    after `cancel()`) stops the loop; any other RPC error is re-raised.
    """

    def __init__(
        self, subscription_func : Callable, events_queue : Optional[queue.PriorityQueue] = None,
        terminate : Optional[threading.Event] = None, log_events_received : bool = False
    ) -> None:
        # BUGFIX: the original defaults were `queue.PriorityQueue` and
        # `threading.Event` (the classes themselves, not instances) — calling
        # with defaults would crash. Instantiate proper per-thread defaults.
        super().__init__(daemon=False)
        self._subscription_func = subscription_func
        self._events_queue = queue.PriorityQueue() if events_queue is None else events_queue
        self._terminate = threading.Event() if terminate is None else terminate
        self._log_events_received = log_events_received
        self._stream = None  # active gRPC stream; set in run(), cancelled via cancel()

    def cancel(self) -> None:
        """Cancel the active gRPC stream, if any, unblocking run()."""
        if self._stream is None: return
        self._stream.cancel()

    def run(self) -> None:
        """Subscribe and forward events into the queue until terminated."""
        while not self._terminate.is_set():
            self._stream = self._subscription_func()
            try:
                for event in self._stream:
                    if self._log_events_received:
                        str_event = grpc_message_to_json_string(event)
                        LOGGER.info('[_collect] event: {:s}'.format(str_event))
                    timestamp = event.event.timestamp.timestamp
                    # (timestamp, event) tuples make the PriorityQueue order events by time
                    self._events_queue.put_nowait((timestamp, event))
            except grpc.RpcError as e:
                if e.code() == grpc.StatusCode.UNAVAILABLE: # pylint: disable=no-member
                    LOGGER.info('[_collect] UNAVAILABLE... retrying...')
                    time.sleep(0.5)
                    continue
                elif e.code() == grpc.StatusCode.CANCELLED: # pylint: disable=no-member
                    break
                else:
                    raise # pragma: no cover
class BaseEventCollector:
    """Aggregates events from multiple gRPC subscription streams into a single
    time-ordered priority queue.

    Usage: register one collector per subscription with install_collector(),
    then start(); consume with get_event()/get_events(); call stop() when done.
    """

    def __init__(
        self, terminate : Optional[threading.Event] = None
    ) -> None:
        self._events_queue = queue.PriorityQueue()
        self._terminate = threading.Event() if terminate is None else terminate
        self._collector_threads : List['CollectorThread'] = list()

    def install_collector(
        self, subscription_method : Callable, request_message : Any,
        log_events_received : bool = False
    ) -> None:
        """Register a subscription; the thread is not started until start()."""
        self._collector_threads.append(CollectorThread(
            lambda: subscription_method(request_message),
            self._events_queue, self._terminate, log_events_received
        ))

    def start(self) -> None:
        """Clear the terminate flag and start all registered collector threads."""
        self._terminate.clear()
        for collector_thread in self._collector_threads:
            collector_thread.start()

    def stop(self) -> None:
        """Signal termination, cancel active streams, and join all threads."""
        self._terminate.set()
        for collector_thread in self._collector_threads:
            collector_thread.cancel()
        for collector_thread in self._collector_threads:
            collector_thread.join()

    def get_events_queue(self) -> queue.PriorityQueue:
        """Expose the underlying queue (e.g. to feed a dispatcher thread)."""
        return self._events_queue

    def get_event(self, block : bool = True, timeout : float = 0.1) -> Optional[Any]:
        """Pop one event; return None if none is available within `timeout`."""
        try:
            _, event = self._events_queue.get(block=block, timeout=timeout)
            return event
        except queue.Empty:
            return None

    # BUGFIX: `count` was annotated `int = None`; it is optional.
    def get_events(self, block : bool = True, timeout : float = 0.1, count : Optional[int] = None) -> List[Any]:
        """Collect events and return them sorted by timestamp.

        With count=None, drain until a get times out or terminate is set;
        otherwise keep polling until `count` events are gathered (or terminated).
        """
        events = []
        if count is None:
            while not self._terminate.is_set():
                event = self.get_event(block=block, timeout=timeout)
                if event is None: break
                events.append(event)
        else:
            while len(events) < count:
                if self._terminate.is_set(): break
                event = self.get_event(block=block, timeout=timeout)
                if event is None: continue
                events.append(event)
        return sorted(events, key=lambda e: e.event.timestamp.timestamp)
def main() -> None:
    """Example: collect Device/Link/Service events for 60 seconds, logging each."""
    logging.basicConfig(level=logging.INFO)

    context_client = ContextClient()
    context_client.connect()

    event_collector = BaseEventCollector()
    for subscription_method in (
        context_client.GetDeviceEvents,
        context_client.GetLinkEvents,
        context_client.GetServiceEvents,
    ):
        event_collector.install_collector(subscription_method, Empty(), log_events_received=True)
    event_collector.start()

    time.sleep(60)

    event_collector.stop()
    context_client.close()

if __name__ == '__main__':
    main()
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# See usage example below
import logging, queue, threading, time
from typing import Any, Callable, Optional
from common.proto.context_pb2 import DeviceEvent, Empty, EventTypeEnum, LinkEvent
from common.tools.grpc.BaseEventCollector import BaseEventCollector
from common.tools.grpc.Tools import grpc_message_to_json_string
from context.client.ContextClient import ContextClient
LOGGER = logging.getLogger(__name__)
class BaseEventDispatcher(threading.Thread):
    """Daemon thread that pops events from a priority queue and routes each one
    to the most specific dispatch_* method implemented by a subclass."""

    def __init__(
        self, events_queue : queue.PriorityQueue,
        terminate : Optional[threading.Event] = None
    ) -> None:
        super().__init__(daemon=True)
        self._events_queue = events_queue
        self._terminate = threading.Event() if terminate is None else terminate

    def stop(self):
        """Signal the run() loop to exit."""
        self._terminate.set()

    def _get_event(self, block : bool = True, timeout : Optional[float] = 0.5) -> Optional[Any]:
        # Pop the next event, or return None when the queue stays empty.
        try:
            _, event = self._events_queue.get(block=block, timeout=timeout)
        except queue.Empty:
            return None
        return event

    def _get_dispatcher(self, event : Any) -> Optional[Callable]:
        # Try handler names from most to least specific, e.g. for a create
        # DeviceEvent: dispatch_device_create -> dispatch_device -> dispatch.
        object_name = str(event.__class__.__name__).lower().replace('event', '')
        event_type = EventTypeEnum.Name(event.event.event_type).lower().replace('eventtype_', '')
        candidate_names = (
            'dispatch_{:s}_{:s}'.format(object_name, event_type),
            'dispatch_{:s}'.format(object_name),
            'dispatch',
        )
        for candidate_name in candidate_names:
            handler = getattr(self, candidate_name, None)
            if handler is not None:
                return handler
        return None

    def run(self) -> None:
        """Consume and dispatch events until terminated."""
        while not self._terminate.is_set():
            event = self._get_event()
            if event is None: continue
            handler = self._get_dispatcher(event)
            if handler is None:
                MSG = 'No dispatcher available for Event({:s})'
                LOGGER.warning(MSG.format(grpc_message_to_json_string(event)))
                continue
            handler(event)
class MyEventDispatcher(BaseEventDispatcher):
    """Example dispatcher: logs each received event at the granularity of the
    matched dispatch_* handler."""

    def dispatch_device_create(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Create: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_device_update(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Update: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_device_remove(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Remove: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_link(self, link_event : LinkEvent) -> None:
        LOGGER.info('Processing Link Create/Update/Remove: {:s}'.format(grpc_message_to_json_string(link_event)))

    def dispatch(self, event : Any) -> None:
        LOGGER.info('Processing any other Event: {:s}'.format(grpc_message_to_json_string(event)))
def main() -> None:
    """Example: collect events and route them through MyEventDispatcher for 60s."""
    logging.basicConfig(level=logging.INFO)

    context_client = ContextClient()
    context_client.connect()

    event_collector = BaseEventCollector()
    for subscription_method in (
        context_client.GetDeviceEvents,
        context_client.GetLinkEvents,
        context_client.GetServiceEvents,
    ):
        event_collector.install_collector(subscription_method, Empty(), log_events_received=True)
    event_collector.start()

    event_dispatcher = MyEventDispatcher(event_collector.get_events_queue())
    event_dispatcher.start()

    time.sleep(60)

    event_dispatcher.stop()
    event_collector.stop()
    context_client.close()

if __name__ == '__main__':
    main()
# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging, threading, time
from typing import Optional
from common.proto.context_pb2 import DeviceEvent, Empty, ServiceEvent
from common.tools.grpc.BaseEventCollector import BaseEventCollector
from common.tools.grpc.BaseEventDispatcher import BaseEventDispatcher
from common.tools.grpc.Tools import grpc_message_to_json_string
from context.client.ContextClient import ContextClient
LOGGER = logging.getLogger(__name__)
class EventCollector(BaseEventCollector):
    """Concrete event collector for the example engine; inherits all behavior
    from BaseEventCollector without modification."""
    pass
class EventDispatcher(BaseEventDispatcher):
    """Example dispatcher: logs Device and Service create/update/remove events;
    other events fall back to BaseEventDispatcher's lookup (no handler here)."""

    def dispatch_device_create(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Create: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_device_update(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Update: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_device_remove(self, device_event : DeviceEvent) -> None:
        LOGGER.info('Processing Device Remove: {:s}'.format(grpc_message_to_json_string(device_event)))

    def dispatch_service_create(self, service_event : ServiceEvent) -> None:
        LOGGER.info('Processing Service Create: {:s}'.format(grpc_message_to_json_string(service_event)))

    def dispatch_service_update(self, service_event : ServiceEvent) -> None:
        LOGGER.info('Processing Service Update: {:s}'.format(grpc_message_to_json_string(service_event)))

    def dispatch_service_remove(self, service_event : ServiceEvent) -> None:
        LOGGER.info('Processing Service Remove: {:s}'.format(grpc_message_to_json_string(service_event)))
class ExampleEventEngine:
    """Example engine wiring a ContextClient, an EventCollector subscribed to
    device/link/service events, and an EventDispatcher consuming the queue.

    A single terminate Event is shared by collector and dispatcher so that
    stop() shuts down both."""

    def __init__(
        self, terminate : Optional[threading.Event] = None
    ) -> None:
        self._terminate = threading.Event() if terminate is None else terminate

        self._context_client = ContextClient()
        self._event_collector = EventCollector(terminate=self._terminate)
        for subscription_method in (
            self._context_client.GetDeviceEvents,
            self._context_client.GetLinkEvents,
            self._context_client.GetServiceEvents,
        ):
            self._event_collector.install_collector(
                subscription_method, Empty(), log_events_received=True
            )

        self._event_dispatcher = EventDispatcher(
            self._event_collector.get_events_queue(), terminate=self._terminate
        )

    def start(self) -> None:
        """Connect the client and start collector and dispatcher threads."""
        self._context_client.connect()
        self._event_collector.start()
        self._event_dispatcher.start()

    def stop(self) -> None:
        """Signal termination, stop both threads, and close the client."""
        self._terminate.set()
        self._event_dispatcher.stop()
        self._event_collector.stop()
        self._context_client.close()
def main() -> None:
    """Run the example event engine for 60 seconds, then shut it down."""
    logging.basicConfig(level=logging.INFO)

    engine = ExampleEventEngine()
    engine.start()
    time.sleep(60)
    engine.stop()

if __name__ == '__main__':
    main()
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment