diff --git a/src/analytics/database/AnalyzerModel.py b/src/analytics/database/AnalyzerModel.py
index 0b66e04d016a5759d9beb812b4338e10e884323b..7832052690c3dd5bc25617ef9143f263f0747843 100644
--- a/src/analytics/database/AnalyzerModel.py
+++ b/src/analytics/database/AnalyzerModel.py
@@ -15,7 +15,7 @@
 import logging
 import enum
 
-from sqlalchemy     import Column, String, Float, Enum
+from sqlalchemy     import Column, String, Float, Enum, BigInteger, JSON
 from sqlalchemy.orm import registry
 from common.proto   import analytics_frontend_pb2
 from common.proto   import kpi_manager_pb2
@@ -36,23 +36,24 @@ class AnalyzerOperationMode (enum.Enum):
 class Analyzer(Base):
     __tablename__ = 'analyzer'
 
-    analyzer_id           = Column(UUID(as_uuid=False)        , primary_key=True)
-    algorithm_name        = Column(String                     , nullable=False)
-    input_kpi_ids         = Column(ARRAY(UUID(as_uuid=False)) , nullable=False)
-    output_kpi_ids        = Column(ARRAY(UUID(as_uuid=False)) , nullable=False)
-    operation_mode        = Column(Enum(AnalyzerOperationMode), nullable=False)
-    batch_min_duration_s  = Column(Float                      , nullable=False)
-    batch_max_duration_s  = Column(Float                      , nullable=False)
-    bacth_min_size        = Column(Float                      , nullable=False)
-    bacth_max_size        = Column(Float                      , nullable=False)
+    analyzer_id           = Column( UUID(as_uuid=False)        , primary_key=True)
+    algorithm_name        = Column( String                     , nullable=False  )
+    input_kpi_ids         = Column( ARRAY(UUID(as_uuid=False)) , nullable=False  )
+    output_kpi_ids        = Column( ARRAY(UUID(as_uuid=False)) , nullable=False  )
+    operation_mode        = Column( Enum(AnalyzerOperationMode), nullable=False  )
+    parameters            = Column( JSON                       , nullable=True   )
+    batch_min_duration_s  = Column( Float                      , nullable=False  )
+    batch_max_duration_s  = Column( Float                      , nullable=False  )
+    batch_min_size        = Column( BigInteger                 , nullable=False  )
+    batch_max_size        = Column( BigInteger                 , nullable=False  )
 
     # helps in logging the information
     def __repr__(self):
-        return (f"<Analyzer(analyzer_id='{self.analyzer_id}',       algorithm_name='{self.algorithm_name}', "
-                f"input_kpi_ids={self.input_kpi_ids},               output_kpi_ids={self.output_kpi_ids}, "
-                f"operation_mode='{self.operation_mode}',           batch_min_duration_s={self.batch_min_duration_s}, "
-                f"batch_max_duration_s={self.batch_max_duration_s}, bacth_min_size={self.bacth_min_size}, "
-                f"bacth_max_size={self.bacth_max_size})>")
+            return (f"<Analyzer(analyzer_id='{self.analyzer_id}'       , algorithm_name='{self.algorithm_name}', "
+                    f"input_kpi_ids={self.input_kpi_ids}               , output_kpi_ids={self.output_kpi_ids}, "
+                    f"operation_mode='{self.operation_mode}'           , parameters={self.parameters}, "
+                    f"batch_min_duration_s={self.batch_min_duration_s} , batch_max_duration_s={self.batch_max_duration_s}, "
+                    f"batch_min_size={self.batch_min_size}             , batch_max_size={self.batch_max_size})>")
 
     @classmethod
     def ConvertAnalyzerToRow(cls, request):
@@ -67,10 +69,11 @@ class Analyzer(Base):
             input_kpi_ids        = [k.kpi_id.uuid for k in request.input_kpi_ids],
             output_kpi_ids       = [k.kpi_id.uuid for k in request.output_kpi_ids],
-            operation_mode       = AnalyzerOperationMode(request.operation_mode),   # converts integer to corresponding Enum class member
+            operation_mode       = AnalyzerOperationMode(request.operation_mode),   # converts integer to corresponding Enum class member
+            parameters           = dict(request.parameters),
             batch_min_duration_s = request.batch_min_duration_s,
             batch_max_duration_s = request.batch_max_duration_s,
-            bacth_min_size       = request.batch_min_size,
-            bacth_max_size       = request.batch_max_size
+            batch_min_size       = request.batch_min_size,
+            batch_max_size       = request.batch_max_size
         )
 
     @classmethod
@@ -85,17 +88,19 @@ class Analyzer(Base):
         response.analyzer_id.analyzer_id.uuid = row.analyzer_id
         response.algorithm_name               = row.algorithm_name
         response.operation_mode               = row.operation_mode
+        response.parameters.update(row.parameters or {})   # parameters is nullable; tolerate None
         
-        _kpi_id = kpi_manager_pb2.KpiId()
         for input_kpi_id in row.input_kpi_ids:
+            _kpi_id = kpi_manager_pb2.KpiId()
             _kpi_id.kpi_id.uuid = input_kpi_id
             response.input_kpi_ids.append(_kpi_id)
         for output_kpi_id in row.output_kpi_ids:
+            _kpi_id = kpi_manager_pb2.KpiId()
             _kpi_id.kpi_id.uuid = output_kpi_id
             response.output_kpi_ids.append(_kpi_id)
 
         response.batch_min_duration_s = row.batch_min_duration_s
         response.batch_max_duration_s = row.batch_max_duration_s
-        response.batch_min_size       = row.bacth_min_size
-        response.batch_max_size       = row.bacth_max_size
+        response.batch_min_size       = row.batch_min_size
+        response.batch_max_size       = row.batch_max_size
         return response
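
Reviewer note on the AnalyzerModel.py changes: besides renaming the misspelled `bacth_*` columns and tightening their types to `BigInteger`, the diff adds a nullable `parameters` JSON column. Below is a minimal, self-contained sketch of how such a column round-trips a dict; it is hypothetical in that it uses an in-memory SQLite engine and a stand-in `Demo` model rather than the service's real database and `Analyzer` mapping:

    import json
    from sqlalchemy import Column, String, JSON, create_engine
    from sqlalchemy.orm import declarative_base, Session

    Base = declarative_base()

    class Demo(Base):                       # stand-in for the Analyzer model
        __tablename__ = 'demo'
        demo_id    = Column(String, primary_key=True)
        parameters = Column(JSON  , nullable=True)

    engine = create_engine('sqlite://')     # in-memory database, sketch only
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # mirror what ConvertAnalyzerToRow stores: a dict of string values
        session.add(Demo(demo_id='a-1',
                         parameters={'thresholds': json.dumps({'avg_value': [20, 30]})}))
        session.commit()

    with Session(engine) as session:
        row = session.get(Demo, 'a-1')
        assert json.loads(row.parameters['thresholds']) == {'avg_value': [20, 30]}

Only JSON-serializable values belong in `parameters`; and since the column is nullable, readers such as ConvertRowToAnalyzer should tolerate None (hence the `row.parameters or {}` guard above).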
diff --git a/src/analytics/frontend/tests/messages.py b/src/analytics/frontend/tests/messages.py
index 0a8300436c55b10120bba3c2c0b4c67f8ae03a7a..4ffbb0b8e3a90e4ea889e39f983567a0c5921027 100644
--- a/src/analytics/frontend/tests/messages.py
+++ b/src/analytics/frontend/tests/messages.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import uuid
+import json
 from common.proto.kpi_manager_pb2        import KpiId
 from common.proto.analytics_frontend_pb2 import ( AnalyzerOperationMode, AnalyzerId,
                                                 Analyzer, AnalyzerFilter )
@@ -26,7 +27,7 @@ def create_analyzer_id():
 def create_analyzer():
     _create_analyzer                              = Analyzer()
     _create_analyzer.analyzer_id.analyzer_id.uuid = str(uuid.uuid4())
-    _create_analyzer.algorithm_name               = "some_algo_name"
+    _create_analyzer.algorithm_name               = "Test_Aggregate_and_Threshold"
     _create_analyzer.operation_mode               = AnalyzerOperationMode.ANALYZEROPERATIONMODE_STREAMING
     
     _kpi_id = KpiId()
@@ -44,6 +45,14 @@ def create_analyzer():
     _create_analyzer.output_kpi_ids.append(_kpi_id)
     _kpi_id.kpi_id.uuid              = str(uuid.uuid4())
     _create_analyzer.output_kpi_ids.append(_kpi_id)
+    # parameters
+    _threshold_dict = {
+        'avg_value'   :(20, 30), 'min_value'   :( 0, 10), 'max_value'   :(45, 50),
+        'first_value' :( 0, 10), 'last_value'  :(40, 50), 'stddev_value':( 0, 10)}
+    _create_analyzer.parameters['thresholds']      = json.dumps(_threshold_dict)
+    _create_analyzer.parameters['window_size']     = "60 seconds"     # e.g. "10 seconds", "2 minutes", "3 hours", "4 days" or "5 weeks"
+    _create_analyzer.parameters['window_slider']   = "30 seconds"     # must be less than window_size
+    _create_analyzer.parameters['store_aggregate'] = str(False)       # set to True to store aggregates; not implemented yet
 
     return _create_analyzer
 
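Note on the message helper: every value placed in `parameters` is a string (`thresholds` is itself JSON-encoded), so the consuming side has to decode them. A hedged sketch of that decoding follows; `parse_window()` and `unpack_parameters()` are illustrative helpers, not functions that exist in this repository:

    import json

    _UNITS = {'seconds': 1, 'minutes': 60, 'hours': 3600, 'days': 86400, 'weeks': 604800}

    def parse_window(text: str) -> int:
        """Turn strings like '60 seconds' or '2 minutes' into seconds."""
        value, unit = text.split()
        return int(value) * _UNITS[unit]

    def unpack_parameters(parameters) -> dict:
        thresholds = json.loads(parameters['thresholds'])   # e.g. {'avg_value': [20, 30], ...}
        window_s   = parse_window(parameters['window_size'])
        slider_s   = parse_window(parameters['window_slider'])
        assert slider_s <= window_s, 'window_slider must be less than window_size'
        return {'thresholds': thresholds, 'window_size_s': window_s, 'window_slider_s': slider_s}

For example, `unpack_parameters(dict(create_analyzer().parameters))` yields the 60/30 second windows and the threshold pairs defined above.
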
diff --git a/src/analytics/frontend/tests/test_frontend.py b/src/analytics/frontend/tests/test_frontend.py
index ae7875b863e52fdf9a57ed1124dfc294a0a87e8a..df6ce165e4ae16444c2d0472121677f899356ba4 100644
--- a/src/analytics/frontend/tests/test_frontend.py
+++ b/src/analytics/frontend/tests/test_frontend.py
@@ -76,7 +76,7 @@ def analyticsFrontend_client(analyticsFrontend_service : AnalyticsFrontendServic
 ###########################
 
-# ----- core funtionality test -----
-def test_StartAnalytic(analyticsFrontend_client):
-    LOGGER.info(' >>> test_StartAnalytic START: <<< ')
+# ----- core functionality test -----
+def test_StartAnalytics(analyticsFrontend_client):
+    LOGGER.info(' >>> test_StartAnalytics START: <<< ')
     response = analyticsFrontend_client.StartAnalyzer(create_analyzer())
     LOGGER.debug(str(response))
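
The renamed test only asserts that `StartAnalyzer` answers. If the RPC returns an `AnalyzerId` (an assumption based on the `create_analyzer_id()` helper in messages.py; the proto service definition is not part of this diff), a slightly stronger variant could look like the following sketch:

    from common.proto.analytics_frontend_pb2 import AnalyzerId
    from analytics.frontend.tests.messages import create_analyzer

    def test_StartAnalytics_returns_id(analyticsFrontend_client):
        analyzer = create_analyzer()
        response = analyticsFrontend_client.StartAnalyzer(analyzer)
        assert isinstance(response, AnalyzerId)   # assumed return type, not verified here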