diff --git a/deploy/tfs.sh b/deploy/tfs.sh
index 38acd8b75de600fda4cae6f47c28aa603cc75c1d..ea9f9091dc8301a1ff5df0a5bb3a81e27b1737c1 100755
--- a/deploy/tfs.sh
+++ b/deploy/tfs.sh
@@ -163,15 +163,5 @@ kubectl create secret generic kfk-kpi-data --namespace ${TFS_K8S_NAMESPACE} --ty
     --from-literal=KFK_SERVER_PORT=${KFK_SERVER_PORT}
 printf "\n"
 
-echo "Create secret with CockroachDB data for QoSProfile"
-kubectl create secret generic crdb-qos-profile-data --namespace ${TFS_K8S_NAMESPACE} --type='Opaque' \
-    --from-literal=CRDB_NAMESPACE=${CRDB_NAMESPACE} \
-    --from-literal=CRDB_SQL_PORT=${CRDB_SQL_PORT} \
-    --from-literal=CRDB_DATABASE=${CRDB_DATABASE_QOSPROFILE} \
-    --from-literal=CRDB_USERNAME=${CRDB_USERNAME} \
-    --from-literal=CRDB_PASSWORD=${CRDB_PASSWORD} \
-    --from-literal=CRDB_SSLMODE=require
-printf "\n"
-
 echo "Create secret with NATS data"
 NATS_CLIENT_PORT=$(kubectl --namespace ${NATS_NAMESPACE} get service ${NATS_NAMESPACE} -o 'jsonpath={.spec.ports[?(@.name=="client")].port}')
diff --git a/manifests/qos_profileservice.yaml b/manifests/qos_profileservice.yaml
index f6838b7171ae5e012bda4821c1848dacdef4e03a..801607880bbcd9a51bacbec396f797dda7132d81 100644
--- a/manifests/qos_profileservice.yaml
+++ b/manifests/qos_profileservice.yaml
@@ -23,6 +23,8 @@ spec:
   #replicas: 1
   template:
     metadata:
+      annotations:
+        config.linkerd.io/skip-outbound-ports: "4222"
       labels:
         app: qos-profileservice
     spec:
@@ -37,9 +39,11 @@ spec:
           env:
             - name: LOG_LEVEL
               value: "INFO"
+            - name: CRDB_DATABASE
+              value: "tfs_qos_profile"
           envFrom:
             - secretRef:
-                name: crdb-qos-profile-data
+                name: crdb-data
           readinessProbe:
             exec:
               command: ["/bin/grpc_health_probe", "-addr=:20040"]
@@ -72,4 +76,26 @@ spec:
     - name: metrics
       protocol: TCP
       port: 9192
-      targetPort: 9192
\ No newline at end of file
+      targetPort: 9192
+---
+apiVersion: autoscaling/v2
+kind: HorizontalPodAutoscaler
+metadata:
+  name: qos-profileservice-hpa
+spec:
+  scaleTargetRef:
+    apiVersion: apps/v1
+    kind: Deployment
+    name: qos-profileservice
+  minReplicas: 1
+  maxReplicas: 20
+  metrics:
+    - type: Resource
+      resource:
+        name: cpu
+        target:
+          type: Utilization
+          averageUtilization: 80
+  #behavior:
+  #  scaleDown:
+  #    stabilizationWindowSeconds: 30
diff --git a/proto/qos_profile.proto b/proto/qos_profile.proto
index dc834876675e5677fa97e8fd62bb07fe5dc7213a..d032addf4889c8a7a19c260c23df6c74c8ffe55b 100644
--- a/proto/qos_profile.proto
+++ b/proto/qos_profile.proto
@@ -49,10 +49,10 @@ message QoSProfile {
 
 
 service QoSProfileService {
-  rpc CreateQoSProfile                (QoSProfile           ) returns (       QoSProfile                 ) {}
-  rpc UpdateQoSProfile                (QoSProfile           ) returns (       QoSProfile                 ) {}
-  rpc DeleteQoSProfile                (context.QoSProfileId ) returns (       context.Empty              ) {}
-  rpc GetQoSProfile                   (context.QoSProfileId ) returns (       QoSProfile                 ) {}
-  rpc GetQoSProfiles                  (context.Empty        ) returns (stream QoSProfile          ) {}
-  rpc GetConstraintListFromQoSProfile (QoDConstraintsRequest) returns (stream context.Constraint  ) {}
+  rpc CreateQoSProfile                (QoSProfile           ) returns (       QoSProfile        ) {}
+  rpc UpdateQoSProfile                (QoSProfile           ) returns (       QoSProfile        ) {}
+  rpc DeleteQoSProfile                (context.QoSProfileId ) returns (       context.Empty     ) {}
+  rpc GetQoSProfile                   (context.QoSProfileId ) returns (       QoSProfile        ) {}
+  rpc GetQoSProfiles                  (context.Empty        ) returns (stream QoSProfile        ) {}
+  rpc GetConstraintListFromQoSProfile (QoDConstraintsRequest) returns (stream context.Constraint) {}
 }