diff --git a/.gitignore b/.gitignore
index 73358ad82e86b0ccf2133b18798a2cf27f15d6cf..32761fef9b6640f2b5f04157d2eeeb9fc6d23d37 100644
--- a/.gitignore
+++ b/.gitignore
@@ -49,6 +49,7 @@ coverage.xml
 *.py,cover
 .hypothesis/
 .pytest_cache/
+.benchmarks/
 cover/
 
 # Translations
@@ -85,7 +86,7 @@ ipython_config.py
 # pyenv
 #   For a library or package, you might want to ignore these files since the code is
 #   intended to run in multiple environments; otherwise, check them in:
-# .python-version
+.python-version
 
 # pipenv
 #   According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
@@ -117,6 +118,9 @@ venv.bak/
 .spyderproject
 .spyproject
 
+# VSCode project settings
+.vscode/
+
 # Rope project settings
 .ropeproject
 
diff --git a/manifests/contextservice.yaml b/manifests/contextservice.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b05c37b4972246e4a1f4697885d48fe8fe209ef1
--- /dev/null
+++ b/manifests/contextservice.yaml
@@ -0,0 +1,53 @@
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: contextservice
+spec:
+  selector:
+    matchLabels:
+      app: contextservice
+  template:
+    metadata:
+      labels:
+        app: contextservice
+    spec:
+      terminationGracePeriodSeconds: 5
+      containers:
+      - name: server
+        image: context_service:develop
+        imagePullPolicy: Never
+        ports:
+        - containerPort: 7070
+        env:
+        - name: DB_ENGINE
+          value: "redis"
+        - name: REDISDB_DATABASE_ID
+          value: "0"
+        - name: LOG_LEVEL
+          value: "DEBUG"
+        readinessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:7070"]
+        livenessProbe:
+          exec:
+            command: ["/bin/grpc_health_probe", "-addr=:7070"]
+        resources:
+          requests:
+            cpu: 250m
+            memory: 512Mi
+          limits:
+            cpu: 700m
+            memory: 1024Mi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: contextservice
+spec:
+  type: ClusterIP
+  selector:
+    app: contextservice
+  ports:
+  - name: grpc
+    port: 7070
+    targetPort: 7070
diff --git a/manifests/redisdb.yaml b/manifests/redisdb.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b16c8282eec705055083fc8d09fead62591b7639
--- /dev/null
+++ b/manifests/redisdb.yaml
@@ -0,0 +1,38 @@
+---
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: redisdb
+spec:
+  selector:
+    matchLabels:
+      app: redisdb
+  replicas: 1
+  template:
+    metadata:
+      labels:
+        app: redisdb
+        version: v1
+    spec:
+      containers:
+      - name: redisdb
+        image: redis:6.2
+        ports:
+        - containerPort: 6379
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: redisdb
+  labels:
+    app: redisdb
+spec:
+  type: ClusterIP
+  selector:
+    app: redisdb
+  ports:
+  - name: redisdb
+    protocol: TCP
+    port: 6379
+    targetPort: 6379
+---
diff --git a/src/build.sh b/src/build.sh
index b022ed8c3e38020c6a28e98ca61e8f32491b29d2..e5cb21fb1340056de9c362307809b80726d33d4d 100755
--- a/src/build.sh
+++ b/src/build.sh
@@ -1,5 +1,13 @@
 #!/usr/bin/env bash
 
+# Make folder containing the script the root folder for its execution
+cd "$(dirname "$0")"
+
+echo "BUILD context"
+context/genproto.sh
+docker build -t "context_service:develop" -f context/Dockerfile_develop --quiet .
+docker build -t "context_service:test" -f context/Dockerfile_test --quiet .
+
 cd monitoring
 ./genproto.sh
 cd ..
@@ -7,3 +15,5 @@ cd ..
 echo "BUILD monitoring"
 docker build -t "monitoring:dockerfile" -f monitoring/Dockerfile .
 
+echo "Prune unused images"
+docker image prune --force
diff --git a/src/common/database/__init__.py b/src/common/database/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/database/inmemory/InMemoryDatabase.py b/src/common/database/inmemory/InMemoryDatabase.py
new file mode 100644
index 0000000000000000000000000000000000000000..59a2274a9785501ffb902015a066c5ae3792a1d8
--- /dev/null
+++ b/src/common/database/inmemory/InMemoryDatabase.py
@@ -0,0 +1,13 @@
+import logging, json
+
+LOGGER = logging.getLogger(__name__)
+
+FILEPATH = 'data/topo_nsfnet.json'
+
+class InMemoryDatabase:
+    def __init__(self, filepath=FILEPATH, **parameters):
+        with open(filepath, 'r') as f:
+            self.json_topology = json.loads(f.read())
+
+    def get_topology(self):
+        return(self.json_topology)
diff --git a/src/common/database/inmemory/__init__.py b/src/common/database/inmemory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/database/redis/RedisDatabase.py b/src/common/database/redis/RedisDatabase.py
new file mode 100644
index 0000000000000000000000000000000000000000..38b0bf34b8db01bd2fc93712876ce93a56294fed
--- /dev/null
+++ b/src/common/database/redis/RedisDatabase.py
@@ -0,0 +1,62 @@
+import logging, json, os, redis
+#from .context_api.Context import Context
+
+LOGGER = logging.getLogger(__name__)
+
+# ~60.0 seconds in total, in incremental steps from 0.002 to 29.99 seconds
+RETRY_DELAY_INITIAL = 0.002
+RETRY_DELAY_INCREMENT = 1.831
+MAX_RETRIES = 15
+
+URL_TEMPLATE = 'redis://{host}:{port}/{dbid}'
+
+FILEPATH = 'data/topo_nsfnet.json'
+
+class RedisDatabase:
+    def __init__(self, **parameters):
+        host = os.environ.get('REDISDB_SERVICE_HOST')
+        if(host is None): raise Exception('EnvironmentVariable(REDISDB_SERVICE_HOST) not found')
+        port = os.environ.get('REDISDB_SERVICE_PORT')
+        if(port is None): raise Exception('EnvironmentVariable(REDISDB_SERVICE_PORT) not found')
+        dbid = os.environ.get('REDISDB_DATABASE_ID')
+        if(dbid is None): raise Exception('EnvironmentVariable(REDISDB_DATABASE_ID) not found')
+        self.redis_url = URL_TEMPLATE.format(host=host, port=port, dbid=dbid)
+        self.handler = None
+        self.connect() # initialize self.handler and connect to server
+
+    def connect(self):
+        self.handler = redis.Redis.from_url(self.redis_url)
+
+    def close(self):
+        if(self.handler is not None): del self.handler
+        self.handler = None
+
+    def get_topology(self):
+        str_topology = self.handler.get('topology')
+        if(str_topology is None):
+            with open(FILEPATH, 'r') as f:
+                json_topology = json.loads(f.read())
+            str_topology = json.dumps(json_topology)
+            self.handler.setnx('topology', str_topology)
+            json_topology['source'] = 'redis missing, loaded from file'
+        else:
+            json_topology = json.loads(str_topology)
+            json_topology['source'] = 'redis found!'
+        return(json_topology)
+
+    #def __getattr__(self, method_name):
+    #    # Expose all methods in the database engine as owned
+    #    def method(*args, **kwargs):
+    #        num_try, delay = 1, RETRY_DELAY_INITIAL
+    #        while num_try <= MAX_RETRIES:
+    #            try:
+    #                handler_method = getattr(self.handler, method_name, None)
+    #                if handler_method is None: raise Exception('Redis does not support method({})'.format(method_name))
+    #                return handler_method(*args, **kwargs)
+    #            except ConnectionError: # as e:
+    #                #if('Channel is closed' not in str(e)): raise
+    #                self.connect() # Try to reconnect
+    #                time.sleep(delay)
+    #                num_try, delay = num_try + 1, delay * RETRY_DELAY_INCREMENT
+    #        raise Exception('Unable to reconnect to Redis after {}'.format(MAX_RETRIES))
+    #    return(method)
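+
+    # Illustrative sketch (not active code): the generic retry decorator in common.tools.RetryDecorator
+    # could be applied to the accessor methods to transparently reconnect on Redis connection errors:
+    #
+    #   from redis.exceptions import ConnectionError
+    #   from common.tools.RetryDecorator import retry, delay_exponential
+    #
+    #   @retry(exceptions={ConnectionError}, max_retries=MAX_RETRIES,
+    #          delay_function=delay_exponential(initial=RETRY_DELAY_INITIAL, increment=RETRY_DELAY_INCREMENT),
+    #          prepare_method_name='connect')
+    #   def get_topology(self): ...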
diff --git a/src/common/database/redis/__init__.py b/src/common/database/redis/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/common/tools/RetryDecorator.py b/src/common/tools/RetryDecorator.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3f7f3e38c3e8b16afd39628bcfa4196f82b077e
--- /dev/null
+++ b/src/common/tools/RetryDecorator.py
@@ -0,0 +1,71 @@
+# This decorator re-executes the decorated function when it raises an exception. It allows controlling the exception
+# classes that should trigger the re-execution, the maximum number of retries, the delay between retries, and the
+# execution of a preparation method before every retry. The delay is specified by means of user-customizable functions.
+#
+# Delay functions should return a compute function taking a single parameter, the retry number. For instance:
+#   delay_linear(initial=0, increment=0):
+#       adds a constant delay of 0 seconds between retries
+#   delay_linear(initial=1, increment=0):
+#       adds a constant delay of 1 second between retries
+#   delay_linear(initial=1, increment=0.5, maximum=10):
+#       adds an increasing delay between retries, starting with 1 second and increasing it linearly in steps of 0.5
+#       seconds, up to a maximum of 10 seconds, every time an exception is caught within the current execution.
+#       E.g. 1.0, 1.5, 2.0, 2.5, ..., 10.0, 10.0, 10.0, ...
+#   delay_exponential(initial=1, increment=1): adds a constant delay of 1 second between retries
+#   delay_exponential(initial=1, increment=2, maximum=10):
+#       adds an increasing delay between retries, starting with 1 second and increasing it exponentially by a factor
+#       of 2, up to a maximum of 10 seconds, every time an exception is caught within the current execution.
+#       E.g. 1.0, 2.0, 4.0, 8.0, 10.0, 10.0, 10.0, ...
+
+# Arguments:
+# - exceptions: defines the set of exception classes to be caught and retried. Others are re-raised.
+#   By default all exceptions are re-raised.
+# - max_retries: defines the maximum number of retries acceptable before giving up. By default, 0 retries are executed.
+# - delay_function: defines the delay computation method to be used. By default, delay_linear with a fixed delay of 0.1
+#   seconds is used.
+# - prepare_method_name: if not None, defines the name of the preparation method within the same class to be executed
+#   when an exception in exceptions is caught, and before running the next retry. By default, it is None, meaning that
+#   no method is executed.
+# - prepare_method_args: defines the list of positional arguments to be provided to the preparation method. If no
+#   preparation method is specified, the argument is silently ignored. By default, an empty list is defined.
+# - prepare_method_kwargs: defines the dictionary of keyword arguments to be provided to the preparation method. If no
+#   preparation method is specified, the argument is silently ignored. By default, an empty dictionary is defined.
+
+import time
+
+def delay_linear(initial=0, increment=0, maximum=None):
+    def compute(num_try):
+        delay = initial + (num_try - 1) * increment
+        if maximum is not None: delay = min(delay, maximum)  # cap the delay at the configured maximum
+        return delay
+    return compute
+
+def delay_exponential(initial=1, increment=1, maximum=None):
+    def compute(num_try):
+        delay = initial * increment ** (num_try - 1)  # exponential growth ('^' would be bitwise XOR in Python)
+        if maximum is not None: delay = min(delay, maximum)  # cap the delay at the configured maximum
+        return delay
+    return compute
+
+def retry(exceptions=set(), max_retries=0, delay_function=delay_linear(initial=0, increment=0),
+          prepare_method_name=None, prepare_method_args=[], prepare_method_kwargs={}):
+    def _reconnect(func):
+        def wrapper(self, *args, **kwargs):
+            if prepare_method_name is not None:
+                prepare_method = getattr(self, prepare_method_name, None)
+                if prepare_method is None: raise Exception('Prepare Method ({}) not found'.format(prepare_method_name))
+            num_try, given_up = 0, False
+            while not given_up:
+                try:
+                    return func(self, *args, **kwargs)
+                except Exception as e:
+                    if not isinstance(e, tuple(exceptions)): raise
+
+                    num_try += 1
+                    given_up = num_try > max_retries
+                    if given_up: raise Exception('Giving up... {} tries failed'.format(max_retries))
+                    if delay_function is not None: time.sleep(delay_function(num_try))
+
+                    if prepare_method_name is not None: prepare_method(*prepare_method_args, **prepare_method_kwargs)
+        return(wrapper)
+    return(_reconnect)
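+
+# Minimal usage sketch (illustrative only; the class and method names below are hypothetical):
+#
+#   class Client:
+#       def connect(self):
+#           ...  # re-establish the underlying connection
+#
+#       @retry(exceptions={ConnectionError}, max_retries=10,
+#              delay_function=delay_exponential(initial=0.01, increment=2.0, maximum=5.0),
+#              prepare_method_name='connect')
+#       def get(self, key):
+#           ...  # may raise ConnectionError; retried with exponential backoff, reconnecting before each retry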
diff --git a/src/common/tools/__init__.py b/src/common/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/Config.py b/src/context/Config.py
new file mode 100644
index 0000000000000000000000000000000000000000..2f2114d6195e5979ad5db736748616091c087a04
--- /dev/null
+++ b/src/context/Config.py
@@ -0,0 +1,10 @@
+import logging
+
+# gRPC settings
+SERVICE_PORT = 7070
+MAX_WORKERS  = 10
+GRACE_PERIOD = 60
+LOG_LEVEL    = logging.WARNING
+
+# Prometheus settings
+METRICS_PORT = 8080
diff --git a/src/context/Dockerfile_develop b/src/context/Dockerfile_develop
new file mode 100644
index 0000000000000000000000000000000000000000..57175a3ab66df2ff96dc8964be9b93ab4a159913
--- /dev/null
+++ b/src/context/Dockerfile_develop
@@ -0,0 +1,35 @@
+FROM python:3-slim
+
+# Install dependencies
+RUN apt-get --yes --quiet --quiet update && \
+    apt-get --yes --quiet --quiet install wget g++ && \
+    rm -rf /var/lib/apt/lists/*
+
+# Set Python to show logs as they occur
+ENV PYTHONUNBUFFERED=1
+
+# Download the gRPC health probe
+RUN GRPC_HEALTH_PROBE_VERSION=v0.2.0 && \
+    wget -qO/bin/grpc_health_probe https://github.com/grpc-ecosystem/grpc-health-probe/releases/download/${GRPC_HEALTH_PROBE_VERSION}/grpc_health_probe-linux-amd64 && \
+    chmod +x /bin/grpc_health_probe
+
+# Get generic Python packages
+RUN python3 -m pip install --upgrade pip setuptools wheel pip-tools
+
+# Set working directory
+WORKDIR /var/teraflow
+
+# Create module sub-folders
+RUN mkdir -p /var/teraflow/context
+
+# Get Python packages per module
+COPY context/requirements.in context/requirements.in
+RUN pip-compile --output-file=context/requirements.txt context/requirements.in
+RUN python3 -m pip install -r context/requirements.txt
+
+# Add files into working directory
+COPY common/. common
+COPY context/. context
+
+# Start context service
+ENTRYPOINT ["python", "-m", "context.service"]
diff --git a/src/context/Dockerfile_test b/src/context/Dockerfile_test
new file mode 100644
index 0000000000000000000000000000000000000000..5487a3ac9b1f5f72415d9dce37396675301a6822
--- /dev/null
+++ b/src/context/Dockerfile_test
@@ -0,0 +1,4 @@
+FROM context_service:develop
+
+# Run integration tests
+ENTRYPOINT ["pytest", "-v", "--log-level=DEBUG", "context/tests/test_integration.py"]
diff --git a/src/context/__init__.py b/src/context/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/client/ContextClient.py b/src/context/client/ContextClient.py
new file mode 100644
index 0000000000000000000000000000000000000000..83d511cd70fedeb266be3c53516a4be19c6b677f
--- /dev/null
+++ b/src/context/client/ContextClient.py
@@ -0,0 +1,35 @@
+import grpc, logging
+from google.protobuf.json_format import MessageToDict
+from common.tools.RetryDecorator import retry, delay_exponential
+from context.proto.context_pb2_grpc import ContextServiceStub
+
+LOGGER = logging.getLogger(__name__)
+MAX_RETRIES = 15
+DELAY_FUNCTION = delay_exponential(initial=0.01, increment=2.0, maximum=5.0)
+
+class ContextClient:
+    def __init__(self, address, port):
+        self.endpoint = '{}:{}'.format(address, port)
+        LOGGER.debug('Creating channel to {}...'.format(self.endpoint))
+        self.channel = None
+        self.stub = None
+        self.connect()
+        LOGGER.debug('Channel created')
+
+    def connect(self):
+        self.channel = grpc.insecure_channel(self.endpoint)
+        self.stub = ContextServiceStub(self.channel)
+
+    def close(self):
+        if(self.channel is not None): self.channel.close()
+        self.channel = None
+        self.stub = None
+
+    @retry(exceptions=set(), max_retries=MAX_RETRIES, delay_function=DELAY_FUNCTION, prepare_method_name='connect')
+    def GetTopology(self, request):
+        LOGGER.debug('GetTopology request: {}'.format(request))
+        response = self.stub.GetTopology(request)
+        LOGGER.debug('GetTopology result: {}'.format(response))
+        return MessageToDict(
+            response, including_default_value_fields=True, preserving_proto_field_name=True,
+            use_integers_for_enums=False)
diff --git a/src/context/client/__init__.py b/src/context/client/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/genproto.sh b/src/context/genproto.sh
new file mode 100755
index 0000000000000000000000000000000000000000..55d596b50a7aef5243eb293658292130ae336776
--- /dev/null
+++ b/src/context/genproto.sh
@@ -0,0 +1,28 @@
+#!/bin/bash -eu
+#
+# Copyright 2018 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Make folder containing the script the root folder for its execution
+cd "$(dirname "$0")"
+
+rm -rf proto/*.py
+touch proto/__init__.py
+
+python -m grpc_tools.protoc -I../../proto --python_out=proto --grpc_python_out=proto context.proto
+
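+# Rewrite the imports in the generated modules so they resolve within the context.proto package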
+sed -i -E 's/(import\ .*)_pb2/from context.proto \1_pb2/g' proto/context_pb2.py
+sed -i -E 's/(import\ .*)_pb2/from context.proto \1_pb2/g' proto/context_pb2_grpc.py
diff --git a/src/context/proto/__init__.py b/src/context/proto/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/proto/context_pb2.py b/src/context/proto/context_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..e4acb11a579694017d1ee5572f1f94848731802a
--- /dev/null
+++ b/src/context/proto/context_pb2.py
@@ -0,0 +1,805 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: context.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='context.proto',
+  package='context',
+  syntax='proto3',
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_pb=b'\n\rcontext.proto\x12\x07\x63ontext\"\x07\n\x05\x45mpty\"{\n\x07\x43ontext\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1f\n\x04topo\x18\x02 \x01(\x0b\x32\x11.context.Topology\x12(\n\x03\x63tl\x18\x03 \x01(\x0b\x32\x1b.context.TeraFlowController\"/\n\tContextId\x12\"\n\x0b\x63ontextUuid\x18\x01 \x01(\x0b\x32\r.context.Uuid\"m\n\x08Topology\x12#\n\x06topoId\x18\x02 \x01(\x0b\x32\x13.context.TopologyId\x12\x1f\n\x06\x64\x65vice\x18\x03 \x03(\x0b\x32\x0f.context.Device\x12\x1b\n\x04link\x18\x04 \x03(\x0b\x32\r.context.Link\"1\n\x04Link\x12)\n\x0c\x65ndpointList\x18\x01 \x03(\x0b\x32\x13.context.EndPointId\"R\n\nTopologyId\x12%\n\tcontextId\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x1d\n\x06topoId\x18\x02 \x01(\x0b\x32\r.context.Uuid\"?\n\nConstraint\x12\x17\n\x0f\x63onstraint_type\x18\x01 \x01(\t\x12\x18\n\x10\x63onstraint_value\x18\x02 \x01(\t\"\xda\x01\n\x06\x44\x65vice\x12$\n\tdevice_id\x18\x01 \x01(\x0b\x32\x11.context.DeviceId\x12\x13\n\x0b\x64\x65vice_type\x18\x02 \x01(\t\x12,\n\rdevice_config\x18\x03 \x01(\x0b\x32\x15.context.DeviceConfig\x12>\n\x14\x64\x65vOperationalStatus\x18\x04 \x01(\x0e\x32 .context.DeviceOperationalStatus\x12\'\n\x0c\x65ndpointList\x18\x05 \x03(\x0b\x32\x11.context.EndPoint\"%\n\x0c\x44\x65viceConfig\x12\x15\n\rdevice_config\x18\x01 \x01(\t\"C\n\x08\x45ndPoint\x12$\n\x07port_id\x18\x01 \x01(\x0b\x32\x13.context.EndPointId\x12\x11\n\tport_type\x18\x02 \x01(\t\"t\n\nEndPointId\x12#\n\x06topoId\x18\x01 \x01(\x0b\x32\x13.context.TopologyId\x12!\n\x06\x64\x65v_id\x18\x02 \x01(\x0b\x32\x11.context.DeviceId\x12\x1e\n\x07port_id\x18\x03 \x01(\x0b\x32\r.context.Uuid\",\n\x08\x44\x65viceId\x12 \n\tdevice_id\x18\x01 \x01(\x0b\x32\r.context.Uuid\"\x14\n\x04Uuid\x12\x0c\n\x04uuid\x18\x01 \x01(\t\"K\n\x12TeraFlowController\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x11\n\tipaddress\x18\x02 \x01(\t\"Q\n\x14\x41uthenticationResult\x12\"\n\x06\x63tl_id\x18\x01 \x01(\x0b\x32\x12.context.ContextId\x12\x15\n\rauthenticated\x18\x02 \x01(\x08*4\n\x17\x44\x65viceOperationalStatus\x12\x0c\n\x08\x44ISABLED\x10\x00\x12\x0b\n\x07\x45NABLED\x10\x01\x32\x44\n\x0e\x43ontextService\x12\x32\n\x0bGetTopology\x12\x0e.context.Empty\x1a\x11.context.Topology\"\x00\x62\x06proto3'
+)
+
+_DEVICEOPERATIONALSTATUS = _descriptor.EnumDescriptor(
+  name='DeviceOperationalStatus',
+  full_name='context.DeviceOperationalStatus',
+  filename=None,
+  file=DESCRIPTOR,
+  create_key=_descriptor._internal_create_key,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='DISABLED', index=0, number=0,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+    _descriptor.EnumValueDescriptor(
+      name='ENABLED', index=1, number=1,
+      serialized_options=None,
+      type=None,
+      create_key=_descriptor._internal_create_key),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=1195,
+  serialized_end=1247,
+)
+_sym_db.RegisterEnumDescriptor(_DEVICEOPERATIONALSTATUS)
+
+DeviceOperationalStatus = enum_type_wrapper.EnumTypeWrapper(_DEVICEOPERATIONALSTATUS)
+DISABLED = 0
+ENABLED = 1
+
+
+
+_EMPTY = _descriptor.Descriptor(
+  name='Empty',
+  full_name='context.Empty',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=26,
+  serialized_end=33,
+)
+
+
+_CONTEXT = _descriptor.Descriptor(
+  name='Context',
+  full_name='context.Context',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextId', full_name='context.Context.contextId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topo', full_name='context.Context.topo', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ctl', full_name='context.Context.ctl', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=35,
+  serialized_end=158,
+)
+
+
+_CONTEXTID = _descriptor.Descriptor(
+  name='ContextId',
+  full_name='context.ContextId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextUuid', full_name='context.ContextId.contextUuid', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=160,
+  serialized_end=207,
+)
+
+
+_TOPOLOGY = _descriptor.Descriptor(
+  name='Topology',
+  full_name='context.Topology',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.Topology.topoId', index=0,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device', full_name='context.Topology.device', index=1,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='link', full_name='context.Topology.link', index=2,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=209,
+  serialized_end=318,
+)
+
+
+_LINK = _descriptor.Descriptor(
+  name='Link',
+  full_name='context.Link',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='context.Link.endpointList', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=320,
+  serialized_end=369,
+)
+
+
+_TOPOLOGYID = _descriptor.Descriptor(
+  name='TopologyId',
+  full_name='context.TopologyId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='contextId', full_name='context.TopologyId.contextId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.TopologyId.topoId', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=371,
+  serialized_end=453,
+)
+
+
+_CONSTRAINT = _descriptor.Descriptor(
+  name='Constraint',
+  full_name='context.Constraint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='constraint_type', full_name='context.Constraint.constraint_type', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='constraint_value', full_name='context.Constraint.constraint_value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=455,
+  serialized_end=518,
+)
+
+
+_DEVICE = _descriptor.Descriptor(
+  name='Device',
+  full_name='context.Device',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.Device.device_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_type', full_name='context.Device.device_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.Device.device_config', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='devOperationalStatus', full_name='context.Device.devOperationalStatus', index=3,
+      number=4, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='endpointList', full_name='context.Device.endpointList', index=4,
+      number=5, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=521,
+  serialized_end=739,
+)
+
+
+_DEVICECONFIG = _descriptor.Descriptor(
+  name='DeviceConfig',
+  full_name='context.DeviceConfig',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_config', full_name='context.DeviceConfig.device_config', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=741,
+  serialized_end=778,
+)
+
+
+_ENDPOINT = _descriptor.Descriptor(
+  name='EndPoint',
+  full_name='context.EndPoint',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='port_id', full_name='context.EndPoint.port_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port_type', full_name='context.EndPoint.port_type', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=780,
+  serialized_end=847,
+)
+
+
+_ENDPOINTID = _descriptor.Descriptor(
+  name='EndPointId',
+  full_name='context.EndPointId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='topoId', full_name='context.EndPointId.topoId', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='dev_id', full_name='context.EndPointId.dev_id', index=1,
+      number=2, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='port_id', full_name='context.EndPointId.port_id', index=2,
+      number=3, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=849,
+  serialized_end=965,
+)
+
+
+_DEVICEID = _descriptor.Descriptor(
+  name='DeviceId',
+  full_name='context.DeviceId',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device_id', full_name='context.DeviceId.device_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=967,
+  serialized_end=1011,
+)
+
+
+_UUID = _descriptor.Descriptor(
+  name='Uuid',
+  full_name='context.Uuid',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='uuid', full_name='context.Uuid.uuid', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1013,
+  serialized_end=1033,
+)
+
+
+_TERAFLOWCONTROLLER = _descriptor.Descriptor(
+  name='TeraFlowController',
+  full_name='context.TeraFlowController',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctl_id', full_name='context.TeraFlowController.ctl_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='ipaddress', full_name='context.TeraFlowController.ipaddress', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1035,
+  serialized_end=1110,
+)
+
+
+_AUTHENTICATIONRESULT = _descriptor.Descriptor(
+  name='AuthenticationResult',
+  full_name='context.AuthenticationResult',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  create_key=_descriptor._internal_create_key,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='ctl_id', full_name='context.AuthenticationResult.ctl_id', index=0,
+      number=1, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+    _descriptor.FieldDescriptor(
+      name='authenticated', full_name='context.AuthenticationResult.authenticated', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR,  create_key=_descriptor._internal_create_key),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=1112,
+  serialized_end=1193,
+)
+
+_CONTEXT.fields_by_name['contextId'].message_type = _CONTEXTID
+_CONTEXT.fields_by_name['topo'].message_type = _TOPOLOGY
+_CONTEXT.fields_by_name['ctl'].message_type = _TERAFLOWCONTROLLER
+_CONTEXTID.fields_by_name['contextUuid'].message_type = _UUID
+_TOPOLOGY.fields_by_name['topoId'].message_type = _TOPOLOGYID
+_TOPOLOGY.fields_by_name['device'].message_type = _DEVICE
+_TOPOLOGY.fields_by_name['link'].message_type = _LINK
+_LINK.fields_by_name['endpointList'].message_type = _ENDPOINTID
+_TOPOLOGYID.fields_by_name['contextId'].message_type = _CONTEXTID
+_TOPOLOGYID.fields_by_name['topoId'].message_type = _UUID
+_DEVICE.fields_by_name['device_id'].message_type = _DEVICEID
+_DEVICE.fields_by_name['device_config'].message_type = _DEVICECONFIG
+_DEVICE.fields_by_name['devOperationalStatus'].enum_type = _DEVICEOPERATIONALSTATUS
+_DEVICE.fields_by_name['endpointList'].message_type = _ENDPOINT
+_ENDPOINT.fields_by_name['port_id'].message_type = _ENDPOINTID
+_ENDPOINTID.fields_by_name['topoId'].message_type = _TOPOLOGYID
+_ENDPOINTID.fields_by_name['dev_id'].message_type = _DEVICEID
+_ENDPOINTID.fields_by_name['port_id'].message_type = _UUID
+_DEVICEID.fields_by_name['device_id'].message_type = _UUID
+_TERAFLOWCONTROLLER.fields_by_name['ctl_id'].message_type = _CONTEXTID
+_AUTHENTICATIONRESULT.fields_by_name['ctl_id'].message_type = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
+DESCRIPTOR.message_types_by_name['Context'] = _CONTEXT
+DESCRIPTOR.message_types_by_name['ContextId'] = _CONTEXTID
+DESCRIPTOR.message_types_by_name['Topology'] = _TOPOLOGY
+DESCRIPTOR.message_types_by_name['Link'] = _LINK
+DESCRIPTOR.message_types_by_name['TopologyId'] = _TOPOLOGYID
+DESCRIPTOR.message_types_by_name['Constraint'] = _CONSTRAINT
+DESCRIPTOR.message_types_by_name['Device'] = _DEVICE
+DESCRIPTOR.message_types_by_name['DeviceConfig'] = _DEVICECONFIG
+DESCRIPTOR.message_types_by_name['EndPoint'] = _ENDPOINT
+DESCRIPTOR.message_types_by_name['EndPointId'] = _ENDPOINTID
+DESCRIPTOR.message_types_by_name['DeviceId'] = _DEVICEID
+DESCRIPTOR.message_types_by_name['Uuid'] = _UUID
+DESCRIPTOR.message_types_by_name['TeraFlowController'] = _TERAFLOWCONTROLLER
+DESCRIPTOR.message_types_by_name['AuthenticationResult'] = _AUTHENTICATIONRESULT
+DESCRIPTOR.enum_types_by_name['DeviceOperationalStatus'] = _DEVICEOPERATIONALSTATUS
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
+  'DESCRIPTOR' : _EMPTY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Empty)
+  })
+_sym_db.RegisterMessage(Empty)
+
+Context = _reflection.GeneratedProtocolMessageType('Context', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Context)
+  })
+_sym_db.RegisterMessage(Context)
+
+ContextId = _reflection.GeneratedProtocolMessageType('ContextId', (_message.Message,), {
+  'DESCRIPTOR' : _CONTEXTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.ContextId)
+  })
+_sym_db.RegisterMessage(ContextId)
+
+Topology = _reflection.GeneratedProtocolMessageType('Topology', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGY,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Topology)
+  })
+_sym_db.RegisterMessage(Topology)
+
+Link = _reflection.GeneratedProtocolMessageType('Link', (_message.Message,), {
+  'DESCRIPTOR' : _LINK,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Link)
+  })
+_sym_db.RegisterMessage(Link)
+
+TopologyId = _reflection.GeneratedProtocolMessageType('TopologyId', (_message.Message,), {
+  'DESCRIPTOR' : _TOPOLOGYID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TopologyId)
+  })
+_sym_db.RegisterMessage(TopologyId)
+
+Constraint = _reflection.GeneratedProtocolMessageType('Constraint', (_message.Message,), {
+  'DESCRIPTOR' : _CONSTRAINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Constraint)
+  })
+_sym_db.RegisterMessage(Constraint)
+
+Device = _reflection.GeneratedProtocolMessageType('Device', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICE,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Device)
+  })
+_sym_db.RegisterMessage(Device)
+
+DeviceConfig = _reflection.GeneratedProtocolMessageType('DeviceConfig', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICECONFIG,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceConfig)
+  })
+_sym_db.RegisterMessage(DeviceConfig)
+
+EndPoint = _reflection.GeneratedProtocolMessageType('EndPoint', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPoint)
+  })
+_sym_db.RegisterMessage(EndPoint)
+
+EndPointId = _reflection.GeneratedProtocolMessageType('EndPointId', (_message.Message,), {
+  'DESCRIPTOR' : _ENDPOINTID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.EndPointId)
+  })
+_sym_db.RegisterMessage(EndPointId)
+
+DeviceId = _reflection.GeneratedProtocolMessageType('DeviceId', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICEID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.DeviceId)
+  })
+_sym_db.RegisterMessage(DeviceId)
+
+Uuid = _reflection.GeneratedProtocolMessageType('Uuid', (_message.Message,), {
+  'DESCRIPTOR' : _UUID,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.Uuid)
+  })
+_sym_db.RegisterMessage(Uuid)
+
+TeraFlowController = _reflection.GeneratedProtocolMessageType('TeraFlowController', (_message.Message,), {
+  'DESCRIPTOR' : _TERAFLOWCONTROLLER,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.TeraFlowController)
+  })
+_sym_db.RegisterMessage(TeraFlowController)
+
+AuthenticationResult = _reflection.GeneratedProtocolMessageType('AuthenticationResult', (_message.Message,), {
+  'DESCRIPTOR' : _AUTHENTICATIONRESULT,
+  '__module__' : 'context_pb2'
+  # @@protoc_insertion_point(class_scope:context.AuthenticationResult)
+  })
+_sym_db.RegisterMessage(AuthenticationResult)
+
+
+
+_CONTEXTSERVICE = _descriptor.ServiceDescriptor(
+  name='ContextService',
+  full_name='context.ContextService',
+  file=DESCRIPTOR,
+  index=0,
+  serialized_options=None,
+  create_key=_descriptor._internal_create_key,
+  serialized_start=1249,
+  serialized_end=1317,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='GetTopology',
+    full_name='context.ContextService.GetTopology',
+    index=0,
+    containing_service=None,
+    input_type=_EMPTY,
+    output_type=_TOPOLOGY,
+    serialized_options=None,
+    create_key=_descriptor._internal_create_key,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_CONTEXTSERVICE)
+
+DESCRIPTOR.services_by_name['ContextService'] = _CONTEXTSERVICE
+
+# @@protoc_insertion_point(module_scope)
diff --git a/src/context/proto/context_pb2_grpc.py b/src/context/proto/context_pb2_grpc.py
new file mode 100644
index 0000000000000000000000000000000000000000..51c61c053221a4bd98b322b96e3ea3ba95e7cbeb
--- /dev/null
+++ b/src/context/proto/context_pb2_grpc.py
@@ -0,0 +1,66 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+from context.proto import context_pb2 as context__pb2
+
+
+class ContextServiceStub(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.GetTopology = channel.unary_unary(
+                '/context.ContextService/GetTopology',
+                request_serializer=context__pb2.Empty.SerializeToString,
+                response_deserializer=context__pb2.Topology.FromString,
+                )
+
+
+class ContextServiceServicer(object):
+    """Missing associated documentation comment in .proto file."""
+
+    def GetTopology(self, request, context):
+        """Missing associated documentation comment in .proto file."""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_ContextServiceServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+            'GetTopology': grpc.unary_unary_rpc_method_handler(
+                    servicer.GetTopology,
+                    request_deserializer=context__pb2.Empty.FromString,
+                    response_serializer=context__pb2.Topology.SerializeToString,
+            ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+            'context.ContextService', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class ContextService(object):
+    """Missing associated documentation comment in .proto file."""
+
+    @staticmethod
+    def GetTopology(request,
+            target,
+            options=(),
+            channel_credentials=None,
+            call_credentials=None,
+            insecure=False,
+            compression=None,
+            wait_for_ready=None,
+            timeout=None,
+            metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/context.ContextService/GetTopology',
+            context__pb2.Empty.SerializeToString,
+            context__pb2.Topology.FromString,
+            options, channel_credentials,
+            insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/src/context/requirements.in b/src/context/requirements.in
new file mode 100644
index 0000000000000000000000000000000000000000..25abdad1b5767117956a88b816399635348884c7
--- /dev/null
+++ b/src/context/requirements.in
@@ -0,0 +1,6 @@
+grpcio-health-checking
+grpcio
+prometheus-client
+pytest
+pytest-benchmark
+redis
diff --git a/src/context/run_integration_tests.sh b/src/context/run_integration_tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..ab607bed4c644caed9722ae99f6bba7151e82a5c
--- /dev/null
+++ b/src/context/run_integration_tests.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Make folder containing the script the root folder for its execution
+cd "$(dirname "$0")"
+
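+# Retrieve the ClusterIP and gRPC port of the contextservice and run the test image against that endpoint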
+ENDPOINT=($(kubectl --namespace teraflow-development get service contextservice -o 'jsonpath={.spec.clusterIP} {.spec.ports[?(@.name=="grpc")].port}'))
+docker run -it --env TEST_TARGET_ADDRESS=${ENDPOINT[0]} --env TEST_TARGET_PORT=${ENDPOINT[1]} context_service:test
diff --git a/src/context/run_unitary_tests.sh b/src/context/run_unitary_tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..08e941f31502fe8dc32ffcfc1563c2223bb4d8d3
--- /dev/null
+++ b/src/context/run_unitary_tests.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+# Make folder containing the script the root folder for its execution
+cd "$(dirname "$0")"
+
+mkdir -p data
+pytest -v --log-level=DEBUG tests/test_unitary.py
diff --git a/src/context/service/ContextService.py b/src/context/service/ContextService.py
new file mode 100644
index 0000000000000000000000000000000000000000..41c23266592b6e7b83cc03472777b21c68616280
--- /dev/null
+++ b/src/context/service/ContextService.py
@@ -0,0 +1,55 @@
+import grpc
+import logging
+from concurrent import futures
+from grpc_health.v1.health import HealthServicer, OVERALL_HEALTH
+from grpc_health.v1.health_pb2 import HealthCheckResponse
+from grpc_health.v1.health_pb2_grpc import add_HealthServicer_to_server
+from context.proto.context_pb2_grpc import add_ContextServiceServicer_to_server
+from context.service.ContextServiceServicerImpl import ContextServiceServicerImpl
+from context.Config import SERVICE_PORT, MAX_WORKERS, GRACE_PERIOD
+
+BIND_ADDRESS = '0.0.0.0'
+LOGGER = logging.getLogger(__name__)
+
+class ContextService:
+    def __init__(self, database, address=BIND_ADDRESS, port=SERVICE_PORT, max_workers=MAX_WORKERS,
+                 grace_period=GRACE_PERIOD):
+        self.database = database
+        self.address = address
+        self.port = port
+        self.endpoint = None
+        self.max_workers = max_workers
+        self.grace_period = grace_period
+        self.context_servicer = None
+        self.health_servicer = None
+        self.pool = None
+        self.server = None
+
+    def start(self):
+        self.endpoint = '{}:{}'.format(self.address, self.port)
+        LOGGER.debug('Starting Service (tentative endpoint: {}, max_workers: {})...'.format(
+            self.endpoint, self.max_workers))
+
+        self.pool = futures.ThreadPoolExecutor(max_workers=self.max_workers)
+        self.server = grpc.server(self.pool) # , interceptors=(tracer_interceptor,))
+
+        self.context_servicer = ContextServiceServicerImpl(self.database)
+        add_ContextServiceServicer_to_server(self.context_servicer, self.server)
+
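+        # Expose the standard gRPC health service so grpc_health_probe-based Kubernetes probes can check serving status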
+        self.health_servicer = HealthServicer(
+            experimental_non_blocking=True, experimental_thread_pool=futures.ThreadPoolExecutor(max_workers=1))
+        add_HealthServicer_to_server(self.health_servicer, self.server)
+
+        port = self.server.add_insecure_port(self.endpoint)
+        self.endpoint = '{}:{}'.format(self.address, port)
+        LOGGER.info('Listening on {}...'.format(self.endpoint))
+        self.server.start()
+        self.health_servicer.set(OVERALL_HEALTH, HealthCheckResponse.SERVING) # pylint: disable=maybe-no-member
+
+        LOGGER.debug('Service started')
+
+    def stop(self):
+        LOGGER.debug('Stopping service (grace period {} seconds)...'.format(self.grace_period))
+        self.health_servicer.enter_graceful_shutdown()
+        self.server.stop(self.grace_period)
+        LOGGER.debug('Service stopped')
diff --git a/src/context/service/ContextServiceServicerImpl.py b/src/context/service/ContextServiceServicerImpl.py
new file mode 100644
index 0000000000000000000000000000000000000000..599c8b2c4985957426c56b922937fc529283e5fb
--- /dev/null
+++ b/src/context/service/ContextServiceServicerImpl.py
@@ -0,0 +1,37 @@
+import grpc, logging
+from prometheus_client import Counter, Histogram
+from context.proto.context_pb2 import Topology
+from context.proto.context_pb2_grpc import ContextServiceServicer
+
+LOGGER = logging.getLogger(__name__)
+
+GETTOPOLOGY_COUNTER_STARTED    = Counter  ('context_gettopology_counter_started',
+                                          'Context:GetTopology counter of requests started'  )
+GETTOPOLOGY_COUNTER_COMPLETED  = Counter  ('context_gettopology_counter_completed',
+                                          'Context:GetTopology counter of requests completed')
+GETTOPOLOGY_COUNTER_FAILED     = Counter  ('context_gettopology_counter_failed',
+                                          'Context:GetTopology counter of requests failed'   )
+GETTOPOLOGY_HISTOGRAM_DURATION = Histogram('context_gettopology_histogram_duration',
+                                          'Context:GetTopology histogram of request duration')
+
+class ContextServiceServicerImpl(ContextServiceServicer):
+    def __init__(self, database):
+        LOGGER.debug('Creating Servicer...')
+        self.database = database
+        LOGGER.debug('Servicer Created')
+
+    @GETTOPOLOGY_HISTOGRAM_DURATION.time()
+    def GetTopology(self, request, context):
+        # request=Empty(), returns=Topology()
+        GETTOPOLOGY_COUNTER_STARTED.inc()
+        try:
+            LOGGER.debug('GetTopology request: {}'.format(str(request)))
+            reply = Topology(**self.database.get_topology())
+            LOGGER.debug('GetTopology reply: {}'.format(str(reply)))
+            GETTOPOLOGY_COUNTER_COMPLETED.inc()
+            return reply
+        except Exception:  # pylint: disable=broad-except
+            LOGGER.exception('GetTopology exception')
+            GETTOPOLOGY_COUNTER_FAILED.inc()
+            context.set_code(grpc.StatusCode.INTERNAL)
+            return Topology()
diff --git a/src/context/service/__init__.py b/src/context/service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/service/__main__.py b/src/context/service/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..5c20c3cec3c67c8d5a00d32d4b44a63758c9461c
--- /dev/null
+++ b/src/context/service/__main__.py
@@ -0,0 +1,52 @@
+import logging, os, signal, sys, threading
+from prometheus_client import start_http_server
+from common.database.Factory import get_database
+from context.service.ContextService import ContextService
+from context.Config import SERVICE_PORT, MAX_WORKERS, GRACE_PERIOD, LOG_LEVEL, METRICS_PORT
+
+terminate = threading.Event()
+logger = None
+
+def signal_handler(signal, frame):
+    global terminate, logger
+    logger.warning('Terminate signal received')
+    terminate.set()
+
+def main():
+    global terminate, logger
+
+    service_port = os.environ.get('CONTEXTSERVICE_SERVICE_PORT_GRPC', SERVICE_PORT)
+    max_workers  = os.environ.get('MAX_WORKERS',  MAX_WORKERS )
+    grace_period = os.environ.get('GRACE_PERIOD', GRACE_PERIOD)
+    log_level    = os.environ.get('LOG_LEVEL',    LOG_LEVEL   )
+    metrics_port = os.environ.get('METRICS_PORT', METRICS_PORT)
+
+    logging.basicConfig(level=log_level)
+    logger = logging.getLogger(__name__)
+
+    signal.signal(signal.SIGINT,  signal_handler)
+    signal.signal(signal.SIGTERM, signal_handler)
+
+    logger.info('Starting...')
+
+    # Start metrics server
+    start_http_server(metrics_port)
+
+    # Get database instance
+    database = get_database()
+
+    # Starting context service
+    service = ContextService(database, port=service_port, max_workers=max_workers, grace_period=grace_period)
+    service.start()
+
+    # Wait for Ctrl+C or termination signal
+    while not terminate.wait(0.1): pass
+
+    logger.info('Terminating...')
+    service.stop()
+
+    logger.info('Bye')
+    return(0)
+
+if __name__ == '__main__':
+    sys.exit(main())
diff --git a/src/context/tests/__init__.py b/src/context/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/src/context/tests/test_integration.py b/src/context/tests/test_integration.py
new file mode 100644
index 0000000000000000000000000000000000000000..eab068b493a06754ec335ea118fa60e671fddec7
--- /dev/null
+++ b/src/context/tests/test_integration.py
@@ -0,0 +1,24 @@
+import logging, os, pytest, sys
+
+# Make the 'src' folder importable when running the tests standalone
+sys.path.append(__file__.split('src')[0] + 'src')
+
+from context.client.ContextClient import ContextClient
+from context.proto.context_pb2 import Empty
+from .tools.ValidateTopology import validate_topology_dict
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+@pytest.fixture(scope='session')
+def remote_context_client():
+    address = os.environ.get('TEST_TARGET_ADDRESS')
+    if(address is None): raise Exception('EnvironmentVariable(TEST_TARGET_ADDRESS) not specified')
+    port = os.environ.get('TEST_TARGET_PORT')
+    if(port is None): raise Exception('EnvironmentVariable(TEST_TARGET_PORT) not specified')
+    return ContextClient(address=address, port=port)
+
+def test_remote_get_topology(remote_context_client):
+    response = remote_context_client.GetTopology(Empty())
+    validate_topology_dict(response)
diff --git a/src/context/tests/test_unitary.py b/src/context/tests/test_unitary.py
new file mode 100644
index 0000000000000000000000000000000000000000..61e580ea704260cd034273e2bd74ae9fbbd606e6
--- /dev/null
+++ b/src/context/tests/test_unitary.py
@@ -0,0 +1,31 @@
+import logging, pytest, sys
+
+# Make the 'src' folder importable when running the tests standalone
+sys.path.append(__file__.split('src')[0] + 'src')
+
+from context.client.ContextClient import ContextClient
+from common.database.Factory import get_database, DatabaseEngineEnum
+from context.proto.context_pb2 import Empty
+from context.service.ContextService import ContextService
+from context.Config import SERVICE_PORT, MAX_WORKERS, GRACE_PERIOD
+from context.tests.tools.ValidateTopology import validate_topology_dict
+
+LOGGER = logging.getLogger(__name__)
+LOGGER.setLevel(logging.DEBUG)
+
+@pytest.fixture(scope='session')
+def local_context_service():
+    database = get_database(engine=DatabaseEngineEnum.INMEMORY, filepath='data/topo_nsfnet.json')
+    _service = ContextService(database, port=SERVICE_PORT, max_workers=MAX_WORKERS, grace_period=GRACE_PERIOD)
+    _service.start()
+    yield _service
+    _service.stop()
+
+@pytest.fixture(scope='session')
+def local_context_client(local_context_service):
+    return ContextClient(address='127.0.0.1', port=SERVICE_PORT)
+
+def test_local_get_topology(local_context_client):
+    response = local_context_client.GetTopology(Empty())
+    validate_topology_dict(response)
diff --git a/src/context/tests/tools/ValidateTopology.py b/src/context/tests/tools/ValidateTopology.py
new file mode 100644
index 0000000000000000000000000000000000000000..b52546e39c27292bec4f11755dade987929e5e71
--- /dev/null
+++ b/src/context/tests/tools/ValidateTopology.py
@@ -0,0 +1,6 @@
+def validate_topology_dict(topology):
+    assert type(topology) is dict
+    assert len(topology.keys()) > 0
+    assert 'topoId' in topology
+    assert 'device' in topology
+    assert 'link' in topology
diff --git a/src/context/tests/tools/__init__.py b/src/context/tests/tools/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391