Commit 817351a5 authored by Lluis Gifre Renom

PathComp component:

Backend:
- Increased number of allowed edges from 10 to 20

Frontend:
- Reduced log level of per-request messages from INFO to DEBUG
- Added a mutex to prevent issuing multiple concurrent calls to the backend
Parent commit: 2f520913
Merge requests: !54 Release 2.0.0, !36 Performance Evaluation Framework + Helper Tools
@@ -121,7 +121,7 @@ struct map_nodes_t {
 };
 #define MAX_NUM_VERTICES 20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
-#define MAX_NUM_EDGES 10 // 100 # LGR: reduced from 100 to 10 to divide by 10 the memory used
+#define MAX_NUM_EDGES 20 // 100 # LGR: reduced from 100 to 20 to divide by 5 the memory used
 // Structures for the graph composition
 struct targetNodes_t {
     // remote / targeted node
...
@@ -12,7 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import grpc, logging
+import grpc, logging, threading
 from common.Constants import DEFAULT_CONTEXT_UUID, INTERDOMAIN_TOPOLOGY_UUID
 from common.method_wrappers.Decorator import MetricsPool, safe_and_metered_rpc_method
 from common.proto.context_pb2 import ContextId, Empty
@@ -35,11 +35,12 @@ ADMIN_CONTEXT_ID = ContextId(**json_context_id(DEFAULT_CONTEXT_UUID))
 class PathCompServiceServicerImpl(PathCompServiceServicer):
     def __init__(self) -> None:
         LOGGER.debug('Creating Servicer...')
+        self._lock = threading.Lock()
         LOGGER.debug('Servicer Created')
     @safe_and_metered_rpc_method(METRICS_POOL, LOGGER)
     def Compute(self, request : PathCompRequest, context : grpc.ServicerContext) -> PathCompReply:
-        LOGGER.info('[Compute] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
+        LOGGER.debug('[Compute] begin ; request = {:s}'.format(grpc_message_to_json_string(request)))
         context_client = ContextClient()
@@ -66,8 +67,10 @@ class PathCompServiceServicerImpl(PathCompServiceServicer):
         #import time
         #ts = time.time()
         #algorithm.execute('request-{:f}.json'.format(ts), 'reply-{:f}.json'.format(ts))
-        algorithm.execute()
+        with self._lock:
+            # ensure backend receives requests one at a time
+            algorithm.execute()
         reply = algorithm.get_reply()
-        LOGGER.info('[Compute] end ; reply = {:s}'.format(grpc_message_to_json_string(reply)))
+        LOGGER.debug('[Compute] end ; reply = {:s}'.format(grpc_message_to_json_string(reply)))
         return reply
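
The locking pattern added above, shown in isolation: the gRPC server dispatches RPCs on a thread pool, so without the lock two Compute() calls could reach the backend at the same time. A minimal sketch follows; the class and helper names are illustrative stand-ins, not the actual PathComp types.

import threading

class SerializedComputeServicer:
    """Illustrative skeleton (hypothetical class): serialize backend access
    because the gRPC server may invoke Compute() from several worker threads."""

    def __init__(self) -> None:
        self._lock = threading.Lock()

    def Compute(self, request, context):
        with self._lock:
            # ensure the backend receives requests one at a time
            reply = self._run_backend(request)
        return reply

    def _run_backend(self, request):
        # stand-in for algorithm.execute() followed by algorithm.get_reply()
        return request

The lock is per-process, so it serializes only the requests handled by this servicer instance, which is what the change above does for the frontend.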
@@ -93,22 +93,22 @@ class _Algorithm:
     def execute(self, dump_request_filename : Optional[str] = None, dump_reply_filename : Optional[str] = None) -> None:
         request = {'serviceList': self.service_list, 'deviceList': self.device_list, 'linkList': self.link_list}
-        self.logger.info('[execute] request={:s}'.format(str(request)))
+        self.logger.debug('[execute] request={:s}'.format(str(request)))
         if dump_request_filename is not None:
             with open(dump_request_filename, 'w', encoding='UTF-8') as f:
                 f.write(json.dumps(request, sort_keys=True, indent=4))
-        self.logger.info('[execute] BACKEND_URL: {:s}'.format(str(BACKEND_URL)))
+        self.logger.debug('[execute] BACKEND_URL: {:s}'.format(str(BACKEND_URL)))
         reply = requests.post(BACKEND_URL, json=request)
         self.status_code = reply.status_code
         self.raw_reply = reply.content.decode('UTF-8')
-        self.logger.info('[execute] status_code={:s} reply={:s}'.format(str(reply.status_code), str(self.raw_reply)))
+        self.logger.debug('[execute] status_code={:s} reply={:s}'.format(str(reply.status_code), str(self.raw_reply)))
         if dump_reply_filename is not None:
             with open(dump_reply_filename, 'w', encoding='UTF-8') as f:
                 f.write('status_code={:s} reply={:s}'.format(str(self.status_code), str(self.raw_reply)))
-        if reply.status_code not in {requests.codes.ok}:
+        if reply.status_code not in {requests.codes.ok}: # pylint: disable=no-member
             raise Exception('Backend error({:s}) for request({:s})'.format(
                 str(self.raw_reply), json.dumps(request, sort_keys=True)))
...
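
For context, a self-contained sketch of the round trip that execute() performs against the backend, mirroring the code above. The URL is a placeholder (the real endpoint comes from BACKEND_URL in the component's settings) and compute_paths() is a hypothetical helper, not part of the component; passing dump_request_filename reproduces the 'request-{timestamp}.json' debugging trick shown commented out in Compute().

import json, logging, requests

LOGGER = logging.getLogger(__name__)

# Placeholder endpoint; the real value comes from BACKEND_URL in the component settings.
PATHCOMP_BACKEND_URL = 'http://127.0.0.1:8081/compute'

def compute_paths(service_list, device_list, link_list, dump_request_filename=None):
    # Same payload shape as _Algorithm.execute()
    request = {'serviceList': service_list, 'deviceList': device_list, 'linkList': link_list}
    if dump_request_filename is not None:
        with open(dump_request_filename, 'w', encoding='UTF-8') as f:
            f.write(json.dumps(request, sort_keys=True, indent=4))
    reply = requests.post(PATHCOMP_BACKEND_URL, json=request)
    LOGGER.debug('[compute_paths] status_code=%s reply=%s', reply.status_code, reply.text)
    if reply.status_code not in {requests.codes.ok}:  # pylint: disable=no-member
        raise Exception('Backend error({:s}) for request({:s})'.format(
            reply.text, json.dumps(request, sort_keys=True)))
    # raw reply text, as stored in self.raw_reply above
    return reply.content.decode('UTF-8')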