diff --git a/deploy/crdb.sh b/deploy/crdb.sh
index 474c32ef7d644dd57f528fbb5d8e13cc5521a53b..3e80b6350e66ec30a725c45acb7cf954ac3009c8 100755
--- a/deploy/crdb.sh
+++ b/deploy/crdb.sh
@@ -171,9 +171,6 @@ function crdb_drop_database_single() {
     kubectl exec -i --namespace ${CRDB_NAMESPACE} cockroachdb-0 -- \
         ./cockroach sql --certs-dir=/cockroach/cockroach-certs --url=${CRDB_CLIENT_URL} \
         --execute "DROP DATABASE IF EXISTS ${CRDB_DATABASE};"
-    kubectl exec -i --namespace ${CRDB_NAMESPACE} cockroachdb-0 -- \
-        ./cockroach sql --certs-dir=/cockroach/cockroach-certs --url=${CRDB_CLIENT_URL} \
-        --execute "DROP DATABASE IF EXISTS ${CRDB_DATABASE_QOSPROFILE};"
     echo
 }
 
diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index ea9f9091dc8301a1ff5df0a5bb3a81e27b1737c1..65c1e8de28f2045b2ac78938b84d3c33e282025e 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -163,7 +163,6 @@ kubectl create secret generic kfk-kpi-data --namespace ${TFS_K8S_NAMESPACE} --ty
     --from-literal=KFK_SERVER_PORT=${KFK_SERVER_PORT}
 printf "\n"
 
-
 echo "Create secret with NATS data"
 NATS_CLIENT_PORT=$(kubectl --namespace ${NATS_NAMESPACE} get service ${NATS_NAMESPACE} -o 'jsonpath={.spec.ports[?(@.name=="client")].port}')
 if [ -z "$NATS_CLIENT_PORT" ]; then
diff --git a/my_deploy.sh b/my_deploy.sh
index ad9a2f14377084226d91cd730f6da856b9a57e52..6d0a488c293f8109262cbb0adcd8c94e3cdbca04 100755
--- a/my_deploy.sh
+++ b/my_deploy.sh
@@ -28,6 +28,9 @@ export TFS_COMPONENTS="context device pathcomp service slice nbi webui load_gene
 # Uncomment to activate Monitoring Framework (new)
 #export TFS_COMPONENTS="${TFS_COMPONENTS} kpi_manager kpi_value_writer kpi_value_api telemetry analytics automation"
 
+# Uncomment to activate QoS Profiles
+#export TFS_COMPONENTS="${TFS_COMPONENTS} qos_profile"
+
 # Uncomment to activate BGP-LS Speaker
 #export TFS_COMPONENTS="${TFS_COMPONENTS} bgpls_speaker"
 
diff --git a/src/qos_profile/client/QoSProfileClient.py b/src/qos_profile/client/QoSProfileClient.py
index c6ca46dce8f47ae5ff993ff0d2789f7ba3136631..748b3f208cc44e80c2e7b88f163f937328249633 100644
--- a/src/qos_profile/client/QoSProfileClient.py
+++ b/src/qos_profile/client/QoSProfileClient.py
@@ -88,4 +88,4 @@ class QoSProfileClient:
         LOGGER.debug('GetConstraintListFromQoSProfile request: {:s}'.format(grpc_message_to_json_string(request)))
         response = self.stub.GetConstraintListFromQoSProfile(request)
         LOGGER.debug('GetConstraintListFromQoSProfile result: {:s}'.format(grpc_message_to_json_string(response)))
-        return response
\ No newline at end of file
+        return response
diff --git a/src/qos_profile/requirements.in b/src/qos_profile/requirements.in
index 9ea7059c4508029762700a2d523b2d25844d7a41..3e98fef362277dbf60019902e115d1c733bea9e7 100644
--- a/src/qos_profile/requirements.in
+++ b/src/qos_profile/requirements.in
@@ -12,9 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-
 psycopg2-binary==2.9.*
 SQLAlchemy==1.4.*
 sqlalchemy-cockroachdb==1.4.*
-SQLAlchemy-Utils==0.38.*
\ No newline at end of file
+SQLAlchemy-Utils==0.38.*
diff --git a/src/qos_profile/service/__main__.py b/src/qos_profile/service/__main__.py
index d734d5567444a283da87eaa90ffb0c225f41165e..7f9e6de92b3ddf24e46a53f478bf90046e32d523 100644
--- a/src/qos_profile/service/__main__.py
+++ b/src/qos_profile/service/__main__.py
@@ -12,47 +12,35 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
 import logging, signal, sys, threading
 from prometheus_client import start_http_server
-from common.Constants import ServiceNameEnum
-from common.Settings import (
-    ENVVAR_SUFIX_SERVICE_HOST, ENVVAR_SUFIX_SERVICE_PORT_GRPC, get_env_var_name, get_log_level, get_metrics_port,
-    wait_for_environment_variables
-)
+from common.Settings import get_log_level, get_metrics_port
 from common.tools.database.GenericDatabase import Database
-from qos_profile.service.database.models.QoSProfile import QoSProfileModel
 from .QoSProfileService import QoSProfileService
+from .database.models.QoSProfile import QoSProfileModel
+
+LOG_LEVEL = get_log_level()
+logging.basicConfig(level=LOG_LEVEL, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
+LOGGER = logging.getLogger(__name__)
+
 
 terminate = threading.Event()
-LOGGER : logging.Logger = None
 
-def signal_handler(signal, frame): # pylint: disable=redefined-outer-name
+def signal_handler(signal, frame): # pylint: disable=redefined-outer-name,unused-argument
     LOGGER.warning('Terminate signal received')
     terminate.set()
 
 def main():
-    global LOGGER # pylint: disable=global-statement
-
-    log_level = get_log_level()
-    logging.basicConfig(level=log_level, format="[%(asctime)s] %(levelname)s:%(name)s:%(message)s")
-    LOGGER = logging.getLogger(__name__)
-
-    wait_for_environment_variables([
-        get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_HOST     ),
-        get_env_var_name(ServiceNameEnum.CONTEXT, ENVVAR_SUFIX_SERVICE_PORT_GRPC),
-    ])
-
+    LOGGER.info('Starting...')
     signal.signal(signal.SIGINT,  signal_handler)
     signal.signal(signal.SIGTERM, signal_handler)
 
-    LOGGER.info('Starting...')
-
     # Start metrics server
     metrics_port = get_metrics_port()
     start_http_server(metrics_port)
 
-    db_manager = Database(db_name=os.getenv('CRDB_DATABASE'), model=QoSProfileModel)
+    # Get Database Engine instance and initialize database, if needed
+    db_manager = Database(QoSProfileModel)
 
     try:
         db_manager.create_database()
@@ -61,7 +49,7 @@ def main():
         LOGGER.exception('Failed to check/create the database: {:s}'.format(str(db_manager.db_engine.url)))
         raise e
 
-    # Starting service service
+    # Starting service
     grpc_service = QoSProfileService(db_manager.db_engine)
     grpc_service.start()
diff --git a/src/qos_profile/service/database/models/_Base.py b/src/qos_profile/service/database/models/_Base.py
index 6e71b3c0f94d68fa718a8ac11210fc03f5ed9ff9..d94dad3cdfc4dad473cc12eb00d502b05595b8f4 100644
--- a/src/qos_profile/service/database/models/_Base.py
+++ b/src/qos_profile/service/database/models/_Base.py
@@ -13,62 +13,10 @@
 # limitations under the License.
 
 import sqlalchemy
-from typing import Any, List
-from sqlalchemy.orm import Session, sessionmaker, declarative_base
-from sqlalchemy.sql import text
-from sqlalchemy_cockroachdb import run_transaction
+from sqlalchemy.orm import declarative_base
 
 _Base = declarative_base()
 
-def create_performance_enhancers(db_engine : sqlalchemy.engine.Engine) -> None:
-    def index_storing(
-        index_name : str, table_name : str, index_fields : List[str], storing_fields : List[str]
-    ) -> Any:
-        str_index_fields = ','.join(['"{:s}"'.format(index_field) for index_field in index_fields])
-        str_storing_fields = ','.join(['"{:s}"'.format(storing_field) for storing_field in storing_fields])
-        INDEX_STORING = 'CREATE INDEX IF NOT EXISTS {:s} ON "{:s}" ({:s}) STORING ({:s});'
-        return text(INDEX_STORING.format(index_name, table_name, str_index_fields, str_storing_fields))
-
-    statements = [
-        index_storing('device_configrule_device_uuid_rec_idx', 'device_configrule', ['device_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('service_configrule_service_uuid_rec_idx', 'service_configrule', ['service_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_configrule_slice_uuid_rec_idx', 'slice_configrule', ['slice_uuid'], [
-            'position', 'kind', 'action', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('connection_service_uuid_rec_idx', 'connection', ['service_uuid'], [
-            'settings', 'created_at', 'updated_at'
-        ]),
-        index_storing('service_constraint_service_uuid_rec_idx', 'service_constraint', ['service_uuid'], [
-            'position', 'kind', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_constraint_slice_uuid_rec_idx', 'slice_constraint', ['slice_uuid'], [
-            'position', 'kind', 'data', 'created_at', 'updated_at'
-        ]),
-        index_storing('endpoint_device_uuid_rec_idx', 'endpoint', ['device_uuid'], [
-            'topology_uuid', 'name', 'endpoint_type', 'kpi_sample_types', 'created_at', 'updated_at'
-        ]),
-        index_storing('qos_profile_context_uuid_rec_idx', 'qos_profile', ['context_uuid'], [
-            'service_name', 'service_type', 'service_status', 'created_at', 'updated_at'
-        ]),
-        index_storing('slice_context_uuid_rec_idx', 'slice', ['context_uuid'], [
-            'slice_name', 'slice_status', 'slice_owner_uuid', 'slice_owner_string', 'created_at', 'updated_at'
-        ]),
-        index_storing('topology_context_uuid_rec_idx', 'topology', ['context_uuid'], [
-            'topology_name', 'created_at', 'updated_at'
-        ]),
-        index_storing('device_component_idx', 'device_component', ['device_uuid'], [
-            'name', 'type', 'attributes', 'created_at', 'updated_at'
-        ]),
-    ]
-    def callback(session : Session) -> bool:
-        for stmt in statements: session.execute(stmt)
-    run_transaction(sessionmaker(bind=db_engine), callback)
-
 def rebuild_database(db_engine : sqlalchemy.engine.Engine, drop_if_exists : bool = False):
     if drop_if_exists: _Base.metadata.drop_all(db_engine)
     _Base.metadata.create_all(db_engine)
-    # create_performance_enhancers(db_engine)
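
Note on the simplified database bootstrap: with create_performance_enhancers() removed, rebuild_database() in _Base.py only runs _Base.metadata.drop_all()/create_all() for the models registered on _Base, and __main__.py delegates engine creation and database initialization to the generic common.tools.database.GenericDatabase.Database helper via Database(QoSProfileModel). The following is a minimal, standalone sketch of that schema-creation path, for illustration only; it assumes QoSProfileModel is declared on _Base and uses only column types SQLite understands, whereas the deployed component works against the CockroachDB engine built by the Database helper:

    import sqlalchemy
    # Importing the model registers its table on _Base.metadata
    # (assumption: QoSProfileModel subclasses _Base)
    from qos_profile.service.database.models.QoSProfile import QoSProfileModel
    from qos_profile.service.database.models._Base import rebuild_database

    # Throwaway in-memory engine standing in for db_manager.db_engine
    # obtained from Database(QoSProfileModel) in __main__.py
    engine = sqlalchemy.create_engine('sqlite:///:memory:')
    rebuild_database(engine, drop_if_exists=True)  # now just drop_all()/create_all(), no STORING indexes
    print(sqlalchemy.inspect(engine).get_table_names())  # expect QoSProfileModel's table in this list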