diff --git a/src/inference/inference_tvm_mxnet.py b/src/inference/inference_tvm_mxnet.py
index 1bf137db4..a07428267 100644
--- a/src/inference/inference_tvm_mxnet.py
+++ b/src/inference/inference_tvm_mxnet.py
@@ -225,6 +225,7 @@ def main():
     try:
         log.info('Converting output tensor to print results')
         res = prepare_output(result, args.task, args.output_names)
+        log.info('Inference results')
         io.process_output(res, log)
     except Exception as ex:
 
@@ -234,6 +235,7 @@ def main():
         inference_result = pp.calculate_performance_metrics_sync_mode(args.batch_size, infer_time)
         report_writer.update_execution_results(**inference_result)
         report_writer.write_report(args.report_path)
+        log.info(f'Performance results:\n{json.dumps(inference_result, indent=4)}')
     except Exception:
         log.error(traceback.format_exc())
 
diff --git a/src/inference/inference_tvm_onnx.py b/src/inference/inference_tvm_onnx.py
index 81f2a2c66..57a45f836 100644
--- a/src/inference/inference_tvm_onnx.py
+++ b/src/inference/inference_tvm_onnx.py
@@ -189,6 +189,7 @@ def main():
     try:
         log.info('Converting output tensor to print results')
         res = prepare_output(result, args.task, args.output_names)
+        log.info('Inference results')
         io.process_output(res, log)
     except Exception as ex:
 
@@ -198,6 +199,7 @@ def main():
         inference_result = pp.calculate_performance_metrics_sync_mode(args.batch_size, infer_time)
         report_writer.update_execution_results(**inference_result)
         report_writer.write_report(args.report_path)
+        log.info(f'Performance results:\n{json.dumps(inference_result, indent=4)}')
     except Exception:
         log.error(traceback.format_exc())
 
diff --git a/src/inference/inference_tvm_pytorch.py b/src/inference/inference_tvm_pytorch.py
index b59ef2566..c911e36fd 100644
--- a/src/inference/inference_tvm_pytorch.py
+++ b/src/inference/inference_tvm_pytorch.py
@@ -154,6 +154,7 @@ def _convert_model_from_framework(self, target, dev):
         model_name = self.args['model_name']
         opt_lev = self.args['opt_level']
         module = 'torchvision.models'
+        log.info('Get model from TorchVision')
         model = importlib.import_module(module).__getattribute__(model_name)
         pt_model = model(weights=True)
 
@@ -163,6 +164,7 @@ def _convert_model_from_framework(self, target, dev):
         scripted_model = torch.jit.trace(pt_model, input_data).eval()
         input_name = self.args['input_name']
         shape_list = [(input_name, input_shape)]
+        log.info('Creating graph module from PyTorch model')
         model, params = tvm.relay.frontend.from_pytorch(scripted_model, shape_list)
         with tvm.transform.PassContext(opt_level=opt_lev):
 
@@ -202,6 +204,7 @@ def main():
     try:
         log.info('Converting output tensor to print results')
         res = prepare_output(result, args.task, args.output_names)
+        log.info('Inference results')
         io.process_output(res, log)
     except Exception as ex:
 
@@ -211,6 +214,7 @@ def main():
         inference_result = pp.calculate_performance_metrics_sync_mode(args.batch_size, infer_time)
         report_writer.update_execution_results(**inference_result)
         report_writer.write_report(args.report_path)
+        log.info(f'Performance results:\n{json.dumps(inference_result, indent=4)}')
     except Exception:
         log.error(traceback.format_exc())