diff --git a/scripts/run_tests_locally-telemetry-backend.sh b/scripts/run_tests_locally-telemetry-backend.sh
index 0afeccb308b3dc3c961d200abc553561c90ba829..745d77c62849fb946e37e0d3177c94e162bdc6af 100755
--- a/scripts/run_tests_locally-telemetry-backend.sh
+++ b/scripts/run_tests_locally-telemetry-backend.sh
@@ -22,6 +22,7 @@ cd $PROJECTDIR/src
 #     kpi_manager/tests/test_unitary.py
 
 # python3 kpi_manager/tests/test_unitary.py
+export KFK_SERVER_ADDRESS='127.0.0.1:9092'
 CRDB_SQL_ADDRESS=$(kubectl get service cockroachdb-public --namespace crdb -o jsonpath='{.spec.clusterIP}')
 export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 RCFILE=$PROJECTDIR/coverage/.coveragerc
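
Note on the exported KFK_SERVER_ADDRESS: the variable only has an effect if the test code resolves the Kafka bootstrap server from the environment when building its client configuration. Below is a minimal illustrative sketch of that pattern; the helper name get_kafka_address is an assumption for illustration, not the project's actual helper.

# Illustrative sketch (not the actual project helper): resolve the Kafka
# bootstrap server from the environment, falling back to the local default
# exported by the run_tests_locally-telemetry-*.sh scripts above.
import os
from confluent_kafka import Producer

def get_kafka_address() -> str:
    # KFK_SERVER_ADDRESS is exported by the test scripts in this diff.
    return os.environ.get('KFK_SERVER_ADDRESS', '127.0.0.1:9092')

producer = Producer({'bootstrap.servers': get_kafka_address()})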
diff --git a/scripts/run_tests_locally-telemetry-frontend.sh b/scripts/run_tests_locally-telemetry-frontend.sh
index 8e0989eca6bca3bbef309f8e92a6b63f3a3de70b..a6447cb4c6bfaa6d80fefac8417df28a960b1943 100755
--- a/scripts/run_tests_locally-telemetry-frontend.sh
+++ b/scripts/run_tests_locally-telemetry-frontend.sh
@@ -19,6 +19,7 @@ PROJECTDIR=`pwd`
 cd $PROJECTDIR/src
 
 # python3 kpi_manager/tests/test_unitary.py
+export KFK_SERVER_ADDRESS='127.0.0.1:9092'
 CRDB_SQL_ADDRESS=$(kubectl get service cockroachdb-public --namespace crdb -o jsonpath='{.spec.clusterIP}')
 export CRDB_URI="cockroachdb://tfs:tfs123@${CRDB_SQL_ADDRESS}:26257/tfs_kpi_mgmt?sslmode=require"
 RCFILE=$PROJECTDIR/coverage/.coveragerc
diff --git a/src/analytics/backend/tests/test_backend.py b/src/analytics/backend/tests/test_backend.py
index 470729160c75fd7491e58191f534db9f4da61806..79d760f8ec06e0f28e9ce98ffd5ee460a5c66c37 100644
--- a/src/analytics/backend/tests/test_backend.py
+++ b/src/analytics/backend/tests/test_backend.py
@@ -119,20 +119,20 @@ LOGGER = logging.getLogger(__name__)
 #     assert isinstance(response, bool)
 
 # --- To TEST StartRequestListenerFunctionality
-def test_StartRequestListener():
-    LOGGER.info('test_RunRequestListener')
-    AnalyticsBackendServiceObj = AnalyticsBackendService()
-    AnalyticsBackendServiceObj.stop_event = Event()
-    listener_thread = Thread(target=AnalyticsBackendServiceObj.RequestListener, args=())
-    listener_thread.start()
+# def test_StartRequestListener():
+#     LOGGER.info('test_RunRequestListener')
+#     AnalyticsBackendServiceObj = AnalyticsBackendService()
+#     AnalyticsBackendServiceObj.stop_event = Event()
+#     listener_thread = Thread(target=AnalyticsBackendServiceObj.RequestListener, args=())
+#     listener_thread.start()
 
-    time.sleep(100)
+#     time.sleep(100)
 
     # AnalyticsBackendServiceObj.stop_event.set()
     # LOGGER.info('Backend termination initiated. waiting for termination... 10 seconds')
     # listener_thread.join(timeout=10)
     # assert not listener_thread.is_alive(), "RequestListener thread did not terminate as expected."
-    LOGGER.info('Completed test_RunRequestListener')
+    # LOGGER.info('Completed test_RunRequestListener')
 
 # To test START and STOP communication together
 # def test_StopRequestListener():
diff --git a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
index 9ffacecc30fac40bb4899b8889386bc23a7609ac..e304d7acb1f1346128dc1f9268034af6b93185da 100644
--- a/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
+++ b/src/analytics/frontend/service/AnalyticsFrontendServiceServicerImpl.py
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import logging, grpc, json
+import logging, grpc, json, queue
 
 from typing          import Dict
 from confluent_kafka import Producer as KafkaProducer
@@ -24,6 +24,8 @@ from common.proto.analytics_frontend_pb2      import Analyzer, AnalyzerId, Analy
 from common.proto.analytics_frontend_pb2_grpc import AnalyticsFrontendServiceServicer
 from analytics.database.Analyzer_DB           import AnalyzerDB
 from analytics.database.AnalyzerModel         import Analyzer as AnalyzerModel
+from apscheduler.schedulers.background        import BackgroundScheduler
+from apscheduler.triggers.interval            import IntervalTrigger
 
 
 LOGGER           = logging.getLogger(__name__)
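
The new queue and apscheduler imports suggest the servicer buffers streamed analyzer responses in a queue and drains it on a fixed interval. The snippet below is a minimal sketch of that pattern only; the names result_queue and _drain_queue are hypothetical and do not reflect the servicer's actual implementation.

# Minimal sketch of the queue + BackgroundScheduler pattern implied by the
# added imports; result_queue and _drain_queue are hypothetical names.
import queue
from apscheduler.schedulers.background import BackgroundScheduler
from apscheduler.triggers.interval import IntervalTrigger

result_queue = queue.Queue()

def _drain_queue():
    # Pull any buffered responses without blocking the scheduler thread.
    while True:
        try:
            item = result_queue.get_nowait()
        except queue.Empty:
            break
        # Hand the buffered item to the gRPC response stream (placeholder).
        print(item)

scheduler = BackgroundScheduler()
scheduler.add_job(_drain_queue, trigger=IntervalTrigger(seconds=5))
scheduler.start()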
diff --git a/src/analytics/frontend/tests/test_frontend.py b/src/analytics/frontend/tests/test_frontend.py
index 6a126905c1308250defaa0d84b292be354fff1cd..3898ec65e81b18214dc9cb014222312de3a4fd19 100644
--- a/src/analytics/frontend/tests/test_frontend.py
+++ b/src/analytics/frontend/tests/test_frontend.py
@@ -84,10 +84,10 @@ def analyticsFrontend_client(analyticsFrontend_service : AnalyticsFrontendServic
 ###########################
 
 # --- "test_validate_kafka_topics" should be executed before the functionality tests ---
-# def test_validate_kafka_topics():
-#     LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
-#     response = KafkaTopic.create_all_topics()
-#     assert isinstance(response, bool)
+def test_validate_kafka_topics():
+    LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
+    response = KafkaTopic.create_all_topics()
+    assert isinstance(response, bool)
 
 # ----- core functionality test -----
 # def test_StartAnalytics(analyticsFrontend_client):
diff --git a/src/analytics/tests/test_analytics_db.py b/src/analytics/tests/test_analytics_db.py
index 58e7d0167044bb461e66b053dcb3999641ea8419..2794edb4a051b38d4cef902fd09aaad5db966179 100644
--- a/src/analytics/tests/test_analytics_db.py
+++ b/src/analytics/tests/test_analytics_db.py
@@ -15,12 +15,13 @@
 
 import logging
 from analytics.database.Analyzer_DB import AnalyzerDB
+from analytics.database.AnalyzerModel import Analyzer
 
 LOGGER = logging.getLogger(__name__)
 
 def test_verify_databases_and_tables():
     LOGGER.info('>>> test_verify_databases_and_tables : START <<< ')
-    AnalyzerDBobj = AnalyzerDB()
+    AnalyzerDBobj = AnalyzerDB(Analyzer)
     # AnalyzerDBobj.drop_database()
     # AnalyzerDBobj.verify_tables()
     AnalyzerDBobj.create_database()
diff --git a/src/telemetry/backend/tests/test_backend.py b/src/telemetry/backend/tests/test_backend.py
index 4764d7f5f10aefe211bae840f06eed9c82386bf8..3314477168bd0a2b20fc8d6dda5f82fb84016a32 100644
--- a/src/telemetry/backend/tests/test_backend.py
+++ b/src/telemetry/backend/tests/test_backend.py
@@ -26,10 +26,10 @@ LOGGER = logging.getLogger(__name__)
 ###########################
 
 # --- "test_validate_kafka_topics" should be run before the functionality tests ---
-# def test_validate_kafka_topics():
-#     LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
-#     response = KafkaTopic.create_all_topics()
-#     assert isinstance(response, bool)
+def test_validate_kafka_topics():
+    LOGGER.debug(" >>> test_validate_kafka_topics: START <<< ")
+    response = KafkaTopic.create_all_topics()
+    assert isinstance(response, bool)
 
 def test_RunRequestListener():
     LOGGER.info('test_RunRequestListener')
diff --git a/src/telemetry/tests/test_telemetryDB.py b/src/telemetry/tests/test_telemetryDB.py
index 1b122e4bca266018c01044e2eb8a1ab277b3e3c3..bbc02a2a22fbbae3a1064fc5f9606ec8b29ff0f9 100644
--- a/src/telemetry/tests/test_telemetryDB.py
+++ b/src/telemetry/tests/test_telemetryDB.py
@@ -15,12 +15,13 @@
 
 import logging
 from telemetry.database.Telemetry_DB import TelemetryDB
+from telemetry.database.TelemetryModel import Collector as CollectorModel
 
 LOGGER = logging.getLogger(__name__)
 
 def test_verify_databases_and_tables():
     LOGGER.info('>>> test_verify_databases_and_tables : START <<< ')
-    TelemetryDBobj = TelemetryDB()
+    TelemetryDBobj = TelemetryDB(CollectorModel)
     # TelemetryDBobj.drop_database()
     # TelemetryDBobj.verify_tables()
     TelemetryDBobj.create_database()
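
Both DB tests now pass the model class into the DB wrapper, which implies AnalyzerDB and TelemetryDB were refactored to take the managed model as a constructor argument (AnalyzerDB(Analyzer), TelemetryDB(CollectorModel)). The sketch below shows the general shape such a wrapper could have; the class name, URI, and method bodies are assumptions for illustration, not the project's actual code.

# Illustrative sketch only: a DB wrapper parameterised by the SQLAlchemy
# model it manages, matching the call sites changed in this diff.
from sqlalchemy import create_engine, inspect

class GenericDB:
    def __init__(self, model, db_uri='sqlite:///:memory:'):
        self.model  = model                      # e.g. Analyzer or CollectorModel
        self.engine = create_engine(db_uri)      # real code targets CockroachDB

    def create_database(self):
        # Create the table(s) declared by the configured model.
        self.model.metadata.create_all(self.engine)

    def verify_tables(self):
        # Check that the model's table exists in the bound database.
        return self.model.__tablename__ in inspect(self.engine).get_table_names()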