From 53ba0ee6de7ca289ac87d848c652182dbae07fe6 Mon Sep 17 00:00:00 2001
From: gifrerenom <lluis.gifre@cttc.es>
Date: Tue, 8 Oct 2024 12:22:24 +0000
Subject: [PATCH] Pre-merge code cleanup

---
 deploy/crdb.sh                                |  3 --
 deploy/tfs.sh                                 |  1 -
 my_deploy.sh                                  |  3 ++
 src/qos_profile/client/QoSProfileClient.py    |  2 +-
 src/qos_profile/requirements.in               |  4 +-
 src/qos_profile/service/__main__.py           | 36 +++++--------
 .../service/database/models/_Base.py          | 54 +------------------
 7 files changed, 18 insertions(+), 85 deletions(-)

diff --git a/deploy/crdb.sh b/deploy/crdb.sh
index 474c32ef7..3e80b6350 100755
--- a/deploy/crdb.sh
+++ b/deploy/crdb.sh
@@ -171,9 +171,6 @@ function crdb_drop_database_single() {
     kubectl exec -i --namespace ${CRDB_NAMESPACE} cockroachdb-0 -- \
         ./cockroach sql --certs-dir=/cockroach/cockroach-certs --url=${CRDB_CLIENT_URL} \
         --execute "DROP DATABASE IF EXISTS ${CRDB_DATABASE};"
-    kubectl exec -i --namespace ${CRDB_NAMESPACE} cockroachdb-0 -- \
-        ./cockroach sql --certs-dir=/cockroach/cockroach-certs --url=${CRDB_CLIENT_URL} \
-        --execute "DROP DATABASE IF EXISTS ${CRDB_DATABASE_QOSPROFILE};"
     echo
 }
 
diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index ea9f9091d..65c1e8de2 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -163,7 +163,6 @@ kubectl create secret generic kfk-kpi-data --namespace ${TFS_K8S_NAMESPACE} --ty
     --from-literal=KFK_SERVER_PORT=${KFK_SERVER_PORT}
 printf "\n"
 
-
 echo "Create secret with NATS data"
 NATS_CLIENT_PORT=$(kubectl --namespace ${NATS_NAMESPACE} get service ${NATS_NAMESPACE} -o 'jsonpath={.spec.ports[?(@.name=="client")].port}')
 if [ -z "$NATS_CLIENT_PORT" ]; then
diff --git a/my_deploy.sh b/my_deploy.sh
index ad9a2f143..6d0a488c2 100755
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -28,6 +28,9 @@ export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_gene
 # Uncomment to activate Monitoring Framework (new)
 #export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics automation"
 
+# Uncomment to activate QoS Profiles
+#export TFS_COMPONENTS="${TFS_COMPONENTS} qos_profile"
+
 # Uncomment to activate BGP-LS Speaker
 #export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker"
 
diff --git a/src/qos_profile/client/QoSProfileClient.py b/src/qos_profile/client/QoSProfileClient.py
index c6ca46dce..748b3f208 100644
--- a/src/qos_profile/client/QoSProfileClient.py
+++ b/src/qos_profile/client/QoSProfileClient.py
@@ -88,4 +88,4 @@ class QoSProfileClient:
         LOGGER.debug('GetConstraintListFromQoSProfile request: {:s}'.format(grpc_message_to_json_string(request)))
         response = self.stub.GetConstraintListFromQoSProfile(request)
         LOGGER.debug('GetConstraintListFromQoSProfile result: {:s}'.format(grpc_message_to_json_string(response)))
-        return response
\ No newline at end of file
+        return response
diff --git a/src/qos_profile/requirements.in b/src/qos_profile/requirements.in
index 9ea7059c4..3e98fef36 100644
--- a/src/qos_profile/requirements.in
+++ b/src/qos_profile/requirements.in
@@ -12,9 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-
 psycopg2-binary==2.9.*
 SQLAlchemy==1.4.*
 sqlalchemy-cockroachdb==1.4.*
-SQLAlchemy-Utils==0.38.*
\ No newline at end of file
+SQLAlchemy-Utils==0.38.*
diff --git a/src/qos_profile/service/__main__.py b/src/qos_profile/service/__main__.py
index d734d5567..7f9e6de92 100644
--- a/src/qos_profile/service/__main__.py
+++ b/src/qos_profile/service/__main__.py
@@ -12,47 +12,35 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 import logging, signal, sys, threading
 from prometheus_client import start_http_server
-from common.Constants import ServiceNameEnum
-from common.Settings import (
-    ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port,
-    wait_for_environment_variables
-)
+from common.Settings import get_log_level, get_metrics_port
 from common.tools.database.GenericDatabase import Database
-from qos_profile.service.database.models.QoSProfile import QoSProfileModel
 from .QoSProfileService import QoSProfileService
+from .database.models.QoSProfile import QoSProfileModel
+
+LOG_LEVEL = get_log_level()
+logging.basicConfig(level=LOG_LEVEL, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
+LOGGER = logging.getLogger(__name__)
+
 
 terminate = threading.Event()
-LOGGER : logging.Logger = None
 
-def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
+def signal_handler(signal, frame): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.warning('Terminate signal received')
     terminate.set()
 
 def main():
-    global LOGGER # pylint: disable=global-statement
-
-    log_level = get_log_level()
-    logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
-    LOGGER = logging.getLogger(__name__)
-
-    wait_for_environment_variables([
-        get_env_var_name(ServiceNameEnum.CONTEXT,  ENVVAR_SUFIX_SERVICE_HOST     ),
-        get_env_var_name(ServiceNameEnum.CONTEXT,  ENVVAR_SUFIX_SERVICE_PORT_GRPC),
-    ])
-
+    LOGGER.info('Starting...')
     signal.signal(signal.SIGINT,  signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
 
-    LOGGER.info('Starting...')
-
     # Start metrics server
     metrics_port = get_metrics_port()
     start_http_server(metrics_port)
 
-    db_manager = Database(db_name=os.getenv('CRDB_DATABASE'), model=QoSProfileModel)
+    # Get Database Engine instance and initialize the database, if needed
+    db_manager = Database(QoSProfileModel)
 
     try:
         db_manager.create_database()
@@ -61,7 +49,7 @@ def main():
         LOGGER.exception('Failed to check/create the database: {:s}'.format(str(db_manager.db_engine.url)))
         raise e
 
-    # Starting service service
+    # Starting service
     grpc_service = QoSProfileService(db_manager.db_engine)
     grpc_service.start()
 
diff --git a/src/qos_profile/service/database/models/_Base.py b/src/qos_profile/service/database/models/_Base.py
index 6e71b3c0f..d94dad3cd 100644
--- a/src/qos_profile/service/database/models/_Base.py
+++ b/src/qos_profile/service/database/models/_Base.py
@@ -13,62 +13,10 @@
 # limitations under the License.
 
 import sqlalchemy
-from typing import Any, List
-from sqlalchemy.orm import Session, sessionmaker, declarative_base
-from sqlalchemy.sql import text
-from sqlalchemy_cockroachdb import run_transaction
+from sqlalchemy.orm import declarative_base
 
 _Base = declarative_base()
 
-def create_performance_enhancers(db_engine : sqlalchemy.engine.Engine) -> None:
-    def index_storing(
-        index_name : str, table_name : str, index_fields : List[str], storing_fields : List[str]
-    ) -> Any:
-        str_index_fields = ','.join(['"{:s}"'.format(index_field) for index_field in index_fields])
-        str_storing_fields = ','.join(['"{:s}"'.format(storing_field) for storing_field in storing_fields])
-        INDEX_STORING = 'CREATE INDEX IF NOT EXISTS {:s} ON "{:s}" ({:s}) STORING ({:s});'
-        return text(INDEX_STORING.format(index_name, table_name, str_index_fields, str_storing_fields))
-
-    statements = [
-        index_storing('device_configrule_device_uuid_rec_idx', 'device_configrule', ['device_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('service_configrule_service_uuid_rec_idx', 'service_configrule', ['service_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_configrule_slice_uuid_rec_idx', 'slice_configrule', ['slice_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('connection_service_uuid_rec_idx', 'connection', ['service_uuid'], [
-            'settings', 'created_at', 'updated_at'
-        ]),
-        index_storing('service_constraint_service_uuid_rec_idx', 'service_constraint', ['service_uuid'], [
-            'position', 'kind', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_constraint_slice_uuid_rec_idx', 'slice_constraint', ['slice_uuid'], [
-            'position', 'kind', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('endpoint_device_uuid_rec_idx', 'endpoint', ['device_uuid'], [
-            'topology_uuid', 'name', 'endpoint_type', 'kpi_sample_types', 'created_at', 'updated_at'
-        ]),
-        index_storing('qos_profile_context_uuid_rec_idx', 'qos_profile', ['context_uuid'], [
-            'service_name', 'service_type', 'service_status', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_context_uuid_rec_idx', 'slice', ['context_uuid'], [
-            'slice_name', 'slice_status', 'slice_owner_uuid', 'slice_owner_string', 'created_at', 'updated_at'
-        ]),
-        index_storing('topology_context_uuid_rec_idx', 'topology', ['context_uuid'], [
-            'topology_name', 'created_at', 'updated_at'
-        ]),
-        index_storing('device_component_idx', 'device_component', ['device_uuid'], [
-            'name', 'type', 'attributes', 'created_at', 'updated_at'
-        ]),
-    ]
-    def callback(session : Session) -> bool:
-        for stmt in statements: session.execute(stmt)
-    run_transaction(sessionmaker(bind=db_engine), callback)
-
 def rebuild_database(db_engine : sqlalchemy.engine.Engine, drop_if_exists : bool = False):
     if drop_if_exists: _Base.metadata.drop_all(db_engine)
     _Base.metadata.create_all(db_engine)
-    # create_performance_enhancers(db_engine)
-- 
GitLab