diff --git a/scripts/run_tests_locally-telemetry-DB.sh b/scripts/run_tests_locally-telemetry-DB.sh
index bb1c48b76440c00b398875a8f704c2a82ba4ab50..4b9a417603cc42a4e7e8b19c7394cc38633817fa 100755
--- a/scripts/run_tests_locally-telemetry-DB.sh
+++ b/scripts/run_tests_locally-telemetry-DB.sh
@@ -22,5 +22,5 @@ cd $PROJECTDIR/src
 #     kpi_manager/tests/test_unitary.py
 
 RCFILE=$PROJECTDIR/coverage/.coveragerc
-python3 -m pytest --log-cli-level=INFO --verbose \
-    telemetry/database/tests/telemetryDBtests.py
+python3 -m pytest --log-level=DEBUG --log-cli-level=DEBUG --verbose \
+    telemetry/tests/test_telemetryDB.py
diff --git a/src/telemetry/database/TelemetryEngine.py b/src/telemetry/database/TelemetryEngine.py
index bd7cda5996779858b8819b7f67a223803c2933c1..965b7c38dc2c45b3913a75877e74061d5264e629 100644
--- a/src/telemetry/database/TelemetryEngine.py
+++ b/src/telemetry/database/TelemetryEngine.py
@@ -27,7 +27,7 @@ class TelemetryEngine:
         if crdb_uri is None:        
             CRDB_NAMESPACE = "crdb"
             CRDB_SQL_PORT  = "26257"
-            CRDB_DATABASE  = "telemetryfrontend"
+            CRDB_DATABASE  = "tfs_telemetry"
             CRDB_USERNAME  = "tfs"
             CRDB_PASSWORD  = "tfs123"
             CRDB_SSLMODE   = "require"
@@ -35,7 +35,7 @@ class TelemetryEngine:
                     CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
         try:
             engine = sqlalchemy.create_engine(crdb_uri, echo=False)
-            LOGGER.info(' TelemetryDBmanager initalized with DB URL: {:}'.format(crdb_uri))
+            LOGGER.info(' TelemetryDB initialized with DB URL: {:}'.format(crdb_uri))
         except: # pylint: disable=bare-except # pragma: no cover
             LOGGER.exception('Failed to connect to database: {:s}'.format(str(crdb_uri)))
             return None # type: ignore
diff --git a/src/telemetry/database/TelemetryDB.py b/src/telemetry/database/Telemetry_DB.py
similarity index 96%
rename from src/telemetry/database/TelemetryDB.py
rename to src/telemetry/database/Telemetry_DB.py
index b5b0c4c7e31f0616c91503c6f4e1dbd7abead5c9..ec7da9e400bde6f34d6c1b14b509e0426eab1dad 100644
--- a/src/telemetry/database/TelemetryDB.py
+++ b/src/telemetry/database/Telemetry_DB.py
@@ -14,6 +14,7 @@
 
 import logging
 import sqlalchemy_utils
+from sqlalchemy import inspect
 from sqlalchemy.orm import sessionmaker
 from telemetry.database.TelemetryModel import Collector as CollectorModel
 from telemetry.database.TelemetryEngine import TelemetryEngine
@@ -23,7 +24,7 @@ from common.method_wrappers.ServiceExceptions import (
 LOGGER = logging.getLogger(__name__)
 DB_NAME = "tfs_telemetry"
 
-class TelemetryDBmanager:
+class TelemetryDB:
     def __init__(self):
         self.db_engine = TelemetryEngine.get_engine()
         if self.db_engine is None:
@@ -51,10 +52,9 @@ class TelemetryDBmanager:
 
     def verify_tables(self):
         try:
-            with self.db_engine.connect() as connection:
-                result = connection.execute("SHOW TABLES;")
-                tables = result.fetchall()
-                LOGGER.info("Tables in DB: {:}".format(tables))
+            inspect_object = inspect(self.db_engine)
+            if(inspect_object.has_table('collector', None)):
+                LOGGER.info("Table exists in DB: {:}".format(self.db_name))
         except Exception as e:
             LOGGER.info("Unable to fetch Table names. {:s}".format(str(e)))
 
diff --git a/src/telemetry/database/tests/__init__.py b/src/telemetry/database/tests/__init__.py
deleted file mode 100644
index 839e45e3b646bc60de7edd81fcfb91b7b38feadf..0000000000000000000000000000000000000000
--- a/src/telemetry/database/tests/__init__.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
\ No newline at end of file
diff --git a/src/telemetry/database/tests/telemetryDBtests.py b/src/telemetry/database/tests/telemetryDBtests.py
deleted file mode 100644
index 0d221106419d6e4ee4b313adf10c90c5e6be7666..0000000000000000000000000000000000000000
--- a/src/telemetry/database/tests/telemetryDBtests.py
+++ /dev/null
@@ -1,86 +0,0 @@
-
-# Copyright 2022-2024 ETSI OSG/SDG TeraFlowSDN (TFS) (https://tfs.etsi.org/)
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import logging
-from typing import Any
-from sqlalchemy.ext.declarative import declarative_base
-from telemetry.database.TelemetryDBmanager import TelemetryDBmanager
-from telemetry.database.TelemetryEngine import TelemetryEngine
-from telemetry.database.tests import temp_DB
-from .messages import create_kpi_request, create_collector_request, \
-                        create_kpi_id_request, create_kpi_filter_request, \
-                        create_collector_id_request, create_collector_filter_request
-
-logging.basicConfig(level=logging.INFO)
-LOGGER = logging.getLogger(__name__)
-
-
-# def test_temp_DB():
-#     temp_DB.main()
-
-def test_telemetry_object_creation():
-    LOGGER.info('--- test_telemetry_object_creation: START')
-
-    LOGGER.info('>>> Creating TelemetryDBmanager Object <<< ')
-    TelemetryDBmanagerObj = TelemetryDBmanager()
-    TelemetryEngine.create_database(TelemetryDBmanagerObj.db_engine)        # creates 'frontend' db, if it doesnot exists.
-
-    LOGGER.info('>>> Creating database <<< ')
-    TelemetryDBmanagerObj.create_database()
-
-    LOGGER.info('>>> verifing database <<< ')
-    TelemetryDBmanagerObj.list_databases()
-
-    # # LOGGER.info('>>> Droping Tables: ')
-    # # TelemetryDBmanagerObj.drop_table("table_naem_here")
-
-    LOGGER.info('>>> Creating Tables <<< ')
-    TelemetryDBmanagerObj.create_tables()
-
-    LOGGER.info('>>> Verifing Table creation <<< ')
-    TelemetryDBmanagerObj.verify_tables()
-
-    # LOGGER.info('>>> TESTING: Row Insertion Operation: kpi Table <<<')
-    # kpi_obj = create_kpi_request()
-    # TelemetryDBmanagerObj.inser_kpi(kpi_obj)
-
-    # LOGGER.info('>>> TESTING: Row Insertion Operation: collector Table <<<')
-    # collector_obj = create_collector_request()
-    # TelemetryDBmanagerObj.insert_collector(collector_obj)
-
-    # LOGGER.info('>>> TESTING: Get KpiDescriptor  <<<')
-    # kpi_id_obj = create_kpi_id_request()
-    # TelemetryDBmanagerObj.get_kpi_descriptor(kpi_id_obj)
-
-    # LOGGER.info('>>> TESTING: Select Collector  <<<')
-    # collector_id_obj = create_collector_id_request()
-    # TelemetryDBmanagerObj.get_collector(collector_id_obj)
-
-    # LOGGER.info('>>> TESTING: Applying kpi filter  <<< ')
-    # kpi_filter : dict[str, Any] = create_kpi_filter_request()
-    # TelemetryDBmanagerObj.select_kpi_descriptor(**kpi_filter)
-
-    # LOGGER.info('>>> TESTING: Applying collector filter   <<<')
-    # collector_filter : dict[str, Any] = create_collector_filter_request()
-    # TelemetryDBmanagerObj.select_collector(**collector_filter)
-    
-    # LOGGER.info('>>> TESTING: Delete KpiDescriptor ')
-    # kpi_id_obj = create_kpi_id_request()
-    # TelemetryDBmanagerObj.delete_kpi_descriptor(kpi_id_obj)
-
-    # LOGGER.info('>>> TESTING: Delete Collector ')
-    # collector_id_obj = create_collector_id_request()
-    # TelemetryDBmanagerObj.delete_collector(collector_id_obj)
-    
\ No newline at end of file
diff --git a/src/telemetry/database/tests/temp_DB.py b/src/telemetry/database/tests/temp_DB.py
deleted file mode 100644
index 089d3542492c2da87b839416f7118749bb82caad..0000000000000000000000000000000000000000
--- a/src/telemetry/database/tests/temp_DB.py
+++ /dev/null
@@ -1,327 +0,0 @@
-from sqlalchemy import create_engine, Column, String, Integer, Text, Float, ForeignKey
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.orm import sessionmaker, relationship
-from sqlalchemy.dialects.postgresql import UUID
-import logging
-
-LOGGER = logging.getLogger(__name__)
-Base = declarative_base()
-
-class Kpi(Base):
-    __tablename__ = 'kpi'
-
-    kpi_id          = Column(UUID(as_uuid=False), primary_key=True)
-    kpi_description = Column(Text)
-    kpi_sample_type = Column(Integer)
-    device_id       = Column(String)
-    endpoint_id     = Column(String)
-    service_id      = Column(String)
-    slice_id        = Column(String)
-    connection_id   = Column(String)
-    link_id         = Column(String)
-
-    collectors = relationship('Collector', back_populates='kpi')
-
-    def __repr__(self):
-        return (f"<Kpi(kpi_id='{self.kpi_id}', kpi_description='{self.kpi_description}', "
-                f"kpi_sample_type='{self.kpi_sample_type}', device_id='{self.device_id}', "
-                f"endpoint_id='{self.endpoint_id}', service_id='{self.service_id}', "
-                f"slice_id='{self.slice_id}', connection_id='{self.connection_id}', "
-                f"link_id='{self.link_id}')>")
-    
-class Collector(Base):
-    __tablename__ = 'collector'
-
-    collector_id        = Column(UUID(as_uuid=False), primary_key=True)
-    kpi_id              = Column(UUID(as_uuid=False), ForeignKey('kpi.kpi_id'))
-    collector           = Column(String)
-    sampling_duration_s = Column(Float)
-    sampling_interval_s = Column(Float)
-    start_timestamp     = Column(Float)
-    end_timestamp       = Column(Float)
-
-    kpi = relationship('Kpi', back_populates='collectors')
-
-    def __repr__(self):
-        return (f"<Collector(collector_id='{self.collector_id}', kpi_id='{self.kpi_id}', "
-                f"collector='{self.collector}', sampling_duration_s='{self.sampling_duration_s}', "
-                f"sampling_interval_s='{self.sampling_interval_s}', start_timestamp='{self.start_timestamp}', "
-                f"end_timestamp='{self.end_timestamp}')>")
-
-class DatabaseManager:
-    def __init__(self, db_url, db_name):
-        self.engine = create_engine(db_url)
-        self.db_name = db_name
-        self.Session = sessionmaker(bind=self.engine)
-        LOGGER.info("DatabaseManager initialized with DB URL: %s and DB Name: %s", db_url, db_name)
-
-    def create_database(self):
-        try:
-            with self.engine.connect() as connection:
-                connection.execute(f"CREATE DATABASE {self.db_name};")
-            LOGGER.info("Database '%s' created successfully.", self.db_name)
-        except Exception as e:
-            LOGGER.error("Error creating database '%s': %s", self.db_name, e)
-        finally:
-            LOGGER.info("create_database method execution finished.")
-
-    def create_tables(self):
-        try:
-            Base.metadata.create_all(self.engine)
-            LOGGER.info("Tables created successfully.")
-        except Exception as e:
-            LOGGER.error("Error creating tables: %s", e)
-        finally:
-            LOGGER.info("create_tables method execution finished.")
-
-    def verify_table_creation(self):
-        try:
-            with self.engine.connect() as connection:
-                result = connection.execute("SHOW TABLES;")
-                tables = result.fetchall()
-                LOGGER.info("Tables verified: %s", tables)
-                return tables
-        except Exception as e:
-            LOGGER.error("Error verifying table creation: %s", e)
-            return []
-        finally:
-            LOGGER.info("verify_table_creation method execution finished.")
-
-    def insert_row_kpi(self, kpi_data):
-        session = self.Session()
-        try:
-            new_kpi = Kpi(**kpi_data)
-            session.add(new_kpi)
-            session.commit()
-            LOGGER.info("Inserted row into KPI table: %s", kpi_data)
-        except Exception as e:
-            session.rollback()
-            LOGGER.error("Error inserting row into KPI table: %s", e)
-        finally:
-            session.close()
-            LOGGER.info("insert_row_kpi method execution finished.")
-
-    def insert_row_collector(self, collector_data):
-        session = self.Session()
-        try:
-            new_collector = Collector(**collector_data)
-            session.add(new_collector)
-            session.commit()
-            LOGGER.info("Inserted row into Collector table: %s", collector_data)
-        except Exception as e:
-            session.rollback()
-            LOGGER.error("Error inserting row into Collector table: %s", e)
-        finally:
-            session.close()
-            LOGGER.info("insert_row_collector method execution finished.")
-
-    def verify_insertion_kpi(self, kpi_id):
-        session = self.Session()
-        try:
-            kpi = session.query(Kpi).filter_by(kpi_id=kpi_id).first()
-            LOGGER.info("Verified insertion in KPI table for kpi_id: %s, Result: %s", kpi_id, kpi)
-            return kpi
-        except Exception as e:
-            LOGGER.error("Error verifying insertion in KPI table for kpi_id %s: %s", kpi_id, e)
-            return None
-        finally:
-            session.close()
-            LOGGER.info("verify_insertion_kpi method execution finished.")
-
-    def verify_insertion_collector(self, collector_id):
-        session = self.Session()
-        try:
-            collector = session.query(Collector).filter_by(collector_id=collector_id).first()
-            LOGGER.info("Verified insertion in Collector table for collector_id: %s, Result: %s", collector_id, collector)
-            return collector
-        except Exception as e:
-            LOGGER.error("Error verifying insertion in Collector table for collector_id %s: %s", collector_id, e)
-            return None
-        finally:
-            session.close()
-            LOGGER.info("verify_insertion_collector method execution finished.")
-
-    def get_all_kpi_rows(self):
-        session = self.Session()
-        try:
-            kpi_rows = session.query(Kpi).all()
-            LOGGER.info("Fetched all rows from KPI table: %s", kpi_rows)
-            return kpi_rows
-        except Exception as e:
-            LOGGER.error("Error fetching all rows from KPI table: %s", e)
-            return []
-        finally:
-            session.close()
-            LOGGER.info("get_all_kpi_rows method execution finished.")
-
-    def get_all_collector_rows(self):
-        session = self.Session()
-        try:
-            collector_rows = session.query(Collector).all()
-            LOGGER.info("Fetched all rows from Collector table: %s", collector_rows)
-            return collector_rows
-        except Exception as e:
-            LOGGER.error("Error fetching all rows from Collector table: %s", e)
-            return []
-        finally:
-            session.close()
-            LOGGER.info("get_all_collector_rows method execution finished.")
-
-    def get_filtered_kpi_rows(self, **filters):
-        session = self.Session()
-        try:
-            query = session.query(Kpi)
-            for column, value in filters.items():
-                query = query.filter(getattr(Kpi, column) == value)
-            result = query.all()
-            LOGGER.info("Fetched filtered rows from KPI table with filters ---------- : {:s}".format(str(result)))
-            return result
-        except NoResultFound:
-            LOGGER.warning("No results found in KPI table with filters %s", filters)
-            return []
-        except Exception as e:
-            LOGGER.error("Error fetching filtered rows from KPI table with filters %s: %s", filters, e)
-            return []
-        finally:
-            session.close()
-            LOGGER.info("get_filtered_kpi_rows method execution finished.")
-
-    def get_filtered_collector_rows(self, **filters):
-        session = self.Session()
-        try:
-            query = session.query(Collector)
-            for column, value in filters.items():
-                query = query.filter(getattr(Collector, column) == value)
-            result = query.all()
-            LOGGER.info("Fetched filtered rows from Collector table with filters %s: %s", filters, result)
-            return result
-        except NoResultFound:
-            LOGGER.warning("No results found in Collector table with filters %s", filters)
-            return []
-        except Exception as e:
-            LOGGER.error("Error fetching filtered rows from Collector table with filters %s: %s", filters, e)
-            return []
-        finally:
-            session.close()
-            LOGGER.info("get_filtered_collector_rows method execution finished.")
-
-    def delete_kpi_by_id(self, kpi_id):
-        session = self.Session()
-        try:
-            kpi = session.query(Kpi).filter_by(kpi_id=kpi_id).first()
-            if kpi:
-                session.delete(kpi)
-                session.commit()
-                LOGGER.info("Deleted KPI with kpi_id: %s", kpi_id)
-            else:
-                LOGGER.warning("KPI with kpi_id %s not found", kpi_id)
-        except SQLAlchemyError as e:
-            session.rollback()
-            LOGGER.error("Error deleting KPI with kpi_id %s: %s", kpi_id, e)
-        finally:
-            session.close()
-            LOGGER.info("delete_kpi_by_id method execution finished.")
-
-    def delete_collector_by_id(self, collector_id):
-        session = self.Session()
-        try:
-            collector = session.query(Collector).filter_by(collector_id=collector_id).first()
-            if collector:
-                session.delete(collector)
-                session.commit()
-                LOGGER.info("Deleted Collector with collector_id: %s", collector_id)
-            else:
-                LOGGER.warning("Collector with collector_id %s not found", collector_id)
-        except SQLAlchemyError as e:
-            session.rollback()
-            LOGGER.error("Error deleting Collector with collector_id %s: %s", collector_id, e)
-        finally:
-            session.close()
-            LOGGER.info("delete_collector_by_id method execution finished.")
-
-
-# Example Usage
-def main():
-    CRDB_SQL_PORT  = "26257"
-    CRDB_DATABASE  = "telemetryfrontend"
-    CRDB_USERNAME  = "tfs"
-    CRDB_PASSWORD  = "tfs123"
-    CRDB_SSLMODE   = "require"    
-    CRDB_URI_TEMPLATE = 'cockroachdb://{:s}:{:s}@127.0.0.1:{:s}/{:s}?sslmode={:s}'
-    crdb_uri = CRDB_URI_TEMPLATE.format(
-            CRDB_USERNAME, CRDB_PASSWORD, CRDB_SQL_PORT, CRDB_DATABASE, CRDB_SSLMODE)
-    # db_url = "cockroachdb://username:password@localhost:26257/"
-    # db_name = "yourdatabase"
-    db_manager = DatabaseManager(crdb_uri, CRDB_DATABASE)
-
-    # Create database
-    db_manager.create_database()
-
-    # Update db_url to include the new database name
-    db_manager.engine = create_engine(f"{crdb_uri}")
-    db_manager.Session = sessionmaker(bind=db_manager.engine)
-
-    # Create tables
-    db_manager.create_tables()
-
-    # Verify table creation
-    tables = db_manager.verify_table_creation()
-    LOGGER.info('Tables in the database: {:s}'.format(str(tables)))    
-
-    # Insert a row into the KPI table
-    kpi_data = {
-        'kpi_id': '123e4567-e89b-12d3-a456-426614174100',
-        'kpi_description': 'Sample KPI',
-        'kpi_sample_type': 1,
-        'device_id': 'device_1',
-        'endpoint_id': 'endpoint_1',
-        'service_id': 'service_1',
-        'slice_id': 'slice_1',
-        'connection_id': 'conn_1',
-        'link_id': 'link_1'
-    }
-    db_manager.insert_row_kpi(kpi_data)
-
-    # Insert a row into the Collector table
-    collector_data = {
-        'collector_id': '123e4567-e89b-12d3-a456-426614174101',
-        'kpi_id': '123e4567-e89b-12d3-a456-426614174000',
-        'collector': 'Collector 1',
-        'sampling_duration_s': 60.0,
-        'sampling_interval_s': 10.0,
-        'start_timestamp': 1625247600.0,
-        'end_timestamp': 1625247660.0
-    }
-    db_manager.insert_row_collector(collector_data)
-
-    # Verify insertion into KPI table
-    kpi = db_manager.verify_insertion_kpi('123e4567-e89b-12d3-a456-426614174000')
-    print("Inserted KPI:", kpi)
-
-    # Verify insertion into Collector table
-    collector = db_manager.verify_insertion_collector('123e4567-e89b-12d3-a456-426614174001')
-    print("Inserted Collector:", collector)
-
-    # Get all rows from KPI table
-    all_kpi_rows = db_manager.get_all_kpi_rows()
-    LOGGER.info("All KPI Rows: %s", all_kpi_rows)
-
-    # Get all rows from Collector table
-    all_collector_rows = db_manager.get_all_collector_rows()
-    LOGGER.info("All Collector Rows: %s", all_collector_rows)
-
-    # Get filtered rows from KPI table
-    filtered_kpi_rows = db_manager.get_filtered_kpi_rows(kpi_description='Sample KPI')
-    LOGGER.info("Filtered KPI Rows: %s", filtered_kpi_rows)
-
-    # Get filtered rows from Collector table
-    filtered_collector_rows = db_manager.get_filtered_collector_rows(collector='Collector 1')
-    LOGGER.info("Filtered Collector Rows: %s", filtered_collector_rows)
-
-    # Delete a KPI by kpi_id
-    kpi_id_to_delete = '123e4567-e89b-12d3-a456-426614174000'
-    db_manager.delete_kpi_by_id(kpi_id_to_delete)
-
-    # Delete a Collector by collector_id
-    collector_id_to_delete = '123e4567-e89b-12d3-a456-426614174001'
-    db_manager.delete_collector_by_id(collector_id_to_delete)
diff --git a/src/telemetry/database/tests/messages.py b/src/telemetry/tests/messages.py
similarity index 100%
rename from src/telemetry/database/tests/messages.py
rename to src/telemetry/tests/messages.py
diff --git a/src/telemetry/database/tests/managementDBtests.py b/src/telemetry/tests/test_telemetryDB.py
similarity index 59%
rename from src/telemetry/database/tests/managementDBtests.py
rename to src/telemetry/tests/test_telemetryDB.py
index 24138abe42be742bd9b16d7840343f9d7c7fe133..c4976f8c2144fcdcad43a3e25d43091010de0d18 100644
--- a/src/telemetry/database/tests/managementDBtests.py
+++ b/src/telemetry/tests/test_telemetryDB.py
@@ -13,10 +13,16 @@
 # limitations under the License.
 
 
-from telemetry.database.managementDB import managementDB
-from telemetry.database.tests.messages import create_collector_model_object
+import logging
+from telemetry.database.Telemetry_DB import TelemetryDB
 
+LOGGER = logging.getLogger(__name__)
 
-def test_add_row_to_db():
-    managementDBobj = managementDB()
-    managementDBobj.add_row_to_db(create_collector_model_object())
\ No newline at end of file
+def test_verify_databases_and_tables():
+    LOGGER.info('>>> test_verify_databases_and_tables : START <<< ')
+    TelemetryDBobj = TelemetryDB()
+    TelemetryDBobj.drop_database()
+    TelemetryDBobj.verify_tables()
+    TelemetryDBobj.create_database()
+    TelemetryDBobj.create_tables()
+    TelemetryDBobj.verify_tables()
\ No newline at end of file