From d2f3fabe8c27405f0efac201033dbb4639ffd37f Mon Sep 17 00:00:00 2001
From: Chris Sidebottom
Date: Wed, 4 Aug 2021 12:05:12 +0000
Subject: [PATCH 1/2] Refactor AOT Test Utils parameters into object

`compile_and_run` was getting quite complicated to understand, as well as
being mostly duplicated by `compile_and_run_multiple_models`. This patch
pulls the common parameters out into a data class, `AOTTestNetwork`, which
makes it clearer what each parameter does and provides documentation.
---
 tests/python/relay/aot/aot_test_utils.py    | 206 +++++++-------------
 tests/python/relay/aot/test_crt_aot.py      | 177 ++++++-----------
 tests/python/relay/test_external_codegen.py |  11 +-
 3 files changed, 137 insertions(+), 257 deletions(-)

diff --git a/tests/python/relay/aot/aot_test_utils.py b/tests/python/relay/aot/aot_test_utils.py
index d4d16346f8c2..2221e9ad7ad4 100644
--- a/tests/python/relay/aot/aot_test_utils.py
+++ b/tests/python/relay/aot/aot_test_utils.py
@@ -24,6 +24,7 @@
 import shutil
 import subprocess
 import tarfile
+from typing import NamedTuple, Union, Optional, List, Dict
 
 import pytest
 import numpy as np
@@ -39,6 +40,30 @@
 _LOG = logging.getLogger(__name__)
 
 
+class AOTTestNetwork(NamedTuple):
+    """Class to describe a network under test
+
+    Parameters
+    ----------
+    module: tvm.IRModule
+        IRModule to generate AOT executor for
+    inputs: Dict[str, np.array]
+        Dict of input names to value arrays
+    outputs: List[np.array]
+        Ordered list of output value arrays
+    name: str
+        Name to use for this network
+    params: Optional[Dict[str, np.array]]
+        Dict of parameter names to value arrays
+    """
+
+    module: tvm.IRModule
+    inputs: Dict[str, np.array]
+    outputs: List[np.array]
+    name: str = "default"
+    params: Optional[Dict[str, np.array]] = None
+
+
 def mangle_name(mod_name, name):
     mod_name = mangle_module_name(mod_name)
     return mod_name + "_" + name
@@ -303,7 +328,7 @@ def emit_main_micro_include(main_file, mod_name):
     main_file.write(f"#include <{mangle_module_name(mod_name)}.h>\n")
 
 
-def create_main(test_name, input_map, output_list_map, output_path, interface_api, workspace_bytes):
+def create_main(test_name, networks, output_path, interface_api, workspace_bytes):
     file_path = pathlib.Path(f"{output_path}/" + test_name).resolve()
     # create header file
     raw_path = file_path.with_suffix(".c").resolve()
@@ -311,32 +336,26 @@ def create_main(test_name, input_map, output_list_map, output_path, interface_ap
         emit_main_common_includes(main_file)
 
         if interface_api == "c":
-            for mod_name in input_map:
-                emit_main_micro_include(main_file, mod_name)
+            for network in networks:
+                emit_main_micro_include(main_file, network.name)
 
         emit_main_prologue(main_file, workspace_bytes)
-        for mod_name in input_map:
-            emit_main_data(main_file, input_map[mod_name], output_list_map[mod_name], mod_name)
+        for network in networks:
+            emit_main_data(main_file, network.inputs, network.outputs, network.name)
         emit_main_init_memory_manager(main_file)
 
         if interface_api == "c":
-            for mod_name in input_map:
-                emit_main_data_structs(
-                    main_file, input_map[mod_name], output_list_map[mod_name], mod_name
-                )
-                emit_main_c_interface_call(main_file, mod_name)
+            for network in networks:
+                emit_main_data_structs(main_file, network.inputs, network.outputs, network.name)
+                emit_main_c_interface_call(main_file, network.name)
         else:
             emit_main_fake_packed_values(main_file)
-            for mod_name in input_map:
-                emit_main_data_setup(
-                    main_file, input_map[mod_name], output_list_map[mod_name], mod_name
-                )
-                emit_main_packed_call(
-                    main_file, input_map[mod_name],
output_list_map[mod_name], mod_name - ) - - for mod_name in input_map: - emit_main_compare(main_file, output_list_map[mod_name], mod_name) + for network in networks: + emit_main_data_setup(main_file, network.inputs, network.outputs, network.name) + emit_main_packed_call(main_file, network.inputs, network.outputs, network.name) + + for network in networks: + emit_main_compare(main_file, network.outputs, network.name) emit_main_epilogue(main_file) @@ -376,15 +395,11 @@ def extract_main_workspace_sizebytes(extract_dir): def compile_and_run( - mod, - inputs, - output_list, + networks: Union[List[AOTTestNetwork], AOTTestNetwork], interface_api, use_unpacked_api, use_calculated_workspaces, - params=None, workspace_byte_alignment=8, - mod_name="default", enable_op_fusion=True, ): """ @@ -395,6 +410,9 @@ def compile_and_run( target = f"{base_target} {extra_target}" cflags = f"-DTVM_RUNTIME_ALLOC_ALIGNMENT_BYTES={workspace_byte_alignment} " + if not isinstance(networks, list): + networks = [networks] + # The calculated workspaces will not account for stack allocator tags used for debugging if not use_calculated_workspaces: cflags += "-DTVM_CRT_STACK_ALLOCATOR_ENABLE_LIFO_CHECK " @@ -403,9 +421,6 @@ def compile_and_run( if not enable_op_fusion: config["relay.FuseOps.max_depth"] = 1 - with tvm.transform.PassContext(opt_level=3, config=config): - lib = tvm.relay.build(mod, target, target_host=target, params=params, mod_name=mod_name) - tmp_path = utils.tempdir() tmp_dir = tmp_path.temp_dir @@ -413,15 +428,6 @@ def compile_and_run( build_path = os.path.join(base_path, "build") os.makedirs(build_path, exist_ok=True) - tar_file = os.path.join(base_path, "test.tar") - export_model_library_format(lib, tar_file) - t = tarfile.open(tar_file) - t.extractall(base_path) - if use_calculated_workspaces: - workspace_bytes = extract_main_workspace_sizebytes(base_path) - else: - workspace_bytes = 16384 * 1024 - include_path = os.path.join(base_path, "include") os.mkdir(include_path) crt_root = tvm.micro.get_standalone_crt_dir() @@ -430,124 +436,52 @@ def compile_and_run( os.path.join(include_path, "crt_config.h"), ) - for key in inputs: - create_header_file( - f'{mangle_name(mod_name, "input_data")}_{key}', - inputs[key], - os.path.join(base_path, "include"), - ) - - for i in range(len(output_list)): - create_header_file( - f'{mangle_name(mod_name,"output_data")}{i}', - np.zeros(output_list[i].shape, output_list[i].dtype), - os.path.join(base_path, "include"), - ) - create_header_file( - f'{mangle_name(mod_name, "expected_output_data")}{i}', - output_list[i], - os.path.join(base_path, "include"), - ) - - create_main( - "test.c", - {mod_name: inputs}, - {mod_name: output_list}, - build_path, - interface_api, - workspace_bytes, - ) - - # Verify that compiles fine - file_dir = os.path.dirname(os.path.abspath(__file__)) - codegen_path = os.path.join(base_path, "codegen") - makefile = os.path.join(file_dir, "aot_test.mk") - make_cmd = ( - f"make CFLAGS='{cflags}' -f {makefile} build_dir=" - + build_path - + f" TVM_ROOT={file_dir}/../../../.." 
- + f" CODEGEN_ROOT={codegen_path}" - + f" STANDALONE_CRT_DIR={tvm.micro.get_standalone_crt_dir()}" - ) - - compile_log_path = os.path.join(build_path, "test_compile.log") - ret = subprocess_log_output(make_cmd, ".", compile_log_path) - assert ret == 0 - - # Verify that runs fine - run_log_path = os.path.join(build_path, "test_run.log") - ret = subprocess_log_output("./aot_test_runner", build_path, run_log_path) - assert ret == 0 - - -def compile_and_run_multiple_models( - mod_map, - input_list_map, - output_list_map, - interface_api, - use_unpacked_api, - use_calculated_workspaces, - param_map, - workspace_byte_alignment=8, -): - """ - This method verifies the generated source - """ - base_target = "c -runtime=c --link-params --executor=aot" - extra_target = f"--workspace-byte-alignment={workspace_byte_alignment} --interface-api={interface_api} --unpacked-api={int(use_unpacked_api)}" - target = f"{base_target} {extra_target}" - tmp_path = utils.tempdir() - tmp_dir = tmp_path.temp_dir - - base_path = os.path.join(tmp_dir, "test") - build_path = os.path.join(base_path, "build") - os.makedirs(build_path, exist_ok=True) - - include_path = os.path.join(base_path, "include") - os.mkdir(include_path) - crt_root = tvm.micro.get_standalone_crt_dir() - shutil.copy2( - os.path.join(crt_root, "template", "crt_config-template.h"), - os.path.join(include_path, "crt_config.h"), - ) - - for mod_name, mod in mod_map.items(): - - with tvm.transform.PassContext(opt_level=3, config={"tir.disable_vectorize": True}): + workspace_bytes = 0 + for network in networks: + with tvm.transform.PassContext(opt_level=3, config=config): lib = tvm.relay.build( - mod, target, target_host=target, params=param_map[mod_name], mod_name=mod_name + network.module, + target, + target_host=target, + params=network.params, + mod_name=network.name, ) - tar_file = os.path.join(base_path, "test.tar") + tar_file = os.path.join(base_path, f"{network.name}.tar") export_model_library_format(lib, tar_file) t = tarfile.open(tar_file) t.extractall(base_path) - input_list = input_list_map[mod_name] - output_list = output_list_map[mod_name] + if use_calculated_workspaces: + workspace_bytes += extract_main_workspace_sizebytes(base_path) + else: + workspace_bytes += 16384 * 1024 - for key in input_list: + for key in network.inputs: create_header_file( - (f'{mangle_name(mod_name,"input_data")}_{key}'), input_list[key], build_path + f'{mangle_name(network.name, "input_data")}_{key}', + network.inputs[key], + include_path, ) - for i in range(len(output_list_map[mod_name])): + for i in range(len(network.outputs)): create_header_file( - (f'{mangle_name(mod_name,"output_data")}{i}'), - np.zeros(output_list[i].shape, output_list[i].dtype), - build_path, + (f'{mangle_name(network.name,"output_data")}{i}'), + np.zeros(network.outputs[i].shape, network.outputs[i].dtype), + include_path, ) create_header_file( - (f'{mangle_name(mod_name,"expected_output_data")}{i}'), output_list[i], build_path + (f'{mangle_name(network.name, "expected_output_data")}{i}'), + network.outputs[i], + include_path, ) create_main( "test.c", - input_list_map, - output_list_map, + networks, build_path, interface_api, - workspace_bytes=16384 * 1024, + workspace_bytes, ) # Verify that compiles fine @@ -555,7 +489,7 @@ def compile_and_run_multiple_models( codegen_path = os.path.join(base_path, "codegen") makefile = os.path.join(file_dir, "aot_test.mk") make_cmd = ( - f"make -f {makefile} build_dir=" + f"make CFLAGS='{cflags}' -f {makefile} build_dir=" + build_path + f" 
TVM_ROOT={file_dir}/../../../.." + f" CODEGEN_ROOT={codegen_path}" diff --git a/tests/python/relay/aot/test_crt_aot.py b/tests/python/relay/aot/test_crt_aot.py index 95d963710b25..3167cb4856f9 100644 --- a/tests/python/relay/aot/test_crt_aot.py +++ b/tests/python/relay/aot/test_crt_aot.py @@ -23,13 +23,14 @@ import tvm from tvm import relay +from tvm.ir.module import IRModule from tvm.relay import testing, transform from tvm.relay.testing import byoc from aot_test_utils import ( + AOTTestNetwork, generate_ref_data, convert_to_relay, compile_and_run, - compile_and_run_multiple_models, parametrize_aot_options, ) @@ -41,14 +42,12 @@ def test_error_c_interface_with_packed_api(): two = relay.add(relay.const(1), relay.const(1)) func = relay.Function([], two) - output_list = generate_ref_data(func, {}) - input_list = [] with pytest.raises(tvm.TVMError, match="Packed interface required for packed operators"): compile_and_run( - func, - input_list, - output_list, + AOTTestNetwork( + module=IRModule.from_expr(func), inputs={}, outputs=generate_ref_data(func, {}) + ), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -85,13 +84,10 @@ def @main(%data : Tensor[(1, 3, 64, 64), uint8], %weight : Tensor[(8, 3, 5, 5), output_list = generate_ref_data(mod, inputs, params) compile_and_run( - mod, - inputs, - output_list, + AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, - params, ) @@ -110,68 +106,43 @@ def test_add_with_params(interface_api, use_unpacked_api, use_calculated_workspa output_list = generate_ref_data(func, inputs, params) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork( + module=IRModule.from_expr(func), inputs=inputs, outputs=output_list, params=params + ), interface_api, use_unpacked_api, use_calculated_workspaces, - params, ) @parametrize_aot_options -def test_conv2d(use_calculated_workspaces, interface_api, use_unpacked_api): +@pytest.mark.parametrize("groups,weight_shape", [(1, 32), (32, 1)]) +def test_conv2d(use_calculated_workspaces, interface_api, use_unpacked_api, groups, weight_shape): """Test a subgraph with a single conv2d operator.""" + dtype = "float32" + ishape = (1, 32, 14, 14) + wshape = (32, weight_shape, 3, 3) - def conv2d_direct(): - dtype = "float32" - ishape = (1, 32, 14, 14) - w1shape = (32, 32, 3, 3) - - data0 = relay.var("data", shape=ishape, dtype=dtype) - weight0 = relay.var("weight", shape=w1shape, dtype=dtype) - out = relay.nn.conv2d(data0, weight0, kernel_size=(3, 3), padding=(1, 1)) - main_f = relay.Function([data0, weight0], out) - mod = tvm.IRModule() - mod["main"] = main_f - mod = transform.InferType()(mod) - - i_data = np.random.uniform(0, 1, ishape).astype(dtype) - w1_data = np.random.uniform(0, 1, w1shape).astype(dtype) - - inputs = OrderedDict([("data", i_data), ("weight", w1_data)]) - return mod, inputs, (1, 32, 14, 14) - - def group_conv2d(): - dtype = "float32" - ishape = (1, 32, 14, 14) - w2shape = (32, 1, 3, 3) - - data0 = relay.var("data", shape=(ishape), dtype=dtype) - weight0 = relay.var("weight", shape=(w2shape), dtype=dtype) - out = relay.nn.conv2d(data0, weight0, kernel_size=(3, 3), padding=(1, 1), groups=32) - main_f = relay.Function([data0, weight0], out) - mod = tvm.IRModule() - mod["main"] = main_f - mod = transform.InferType()(mod) - - i_data = np.random.uniform(0, 1, ishape).astype(dtype) - w_data = np.random.uniform(0, 1, w2shape).astype(dtype) - - inputs = OrderedDict([("data", i_data), ("weight", w_data)]) - return 
mod, inputs, (1, 32, 14, 14) - - for mod, inputs, out_shape in [conv2d_direct(), group_conv2d()]: - output_list = generate_ref_data(mod, inputs) - compile_and_run( - mod, - inputs, - output_list, - interface_api, - use_unpacked_api, - use_calculated_workspaces, - ) + data0 = relay.var("data", shape=ishape, dtype=dtype) + weight0 = relay.var("weight", shape=wshape, dtype=dtype) + out = relay.nn.conv2d(data0, weight0, kernel_size=(3, 3), padding=(1, 1), groups=groups) + main_f = relay.Function([data0, weight0], out) + mod = tvm.IRModule() + mod["main"] = main_f + mod = transform.InferType()(mod) + + i_data = np.random.uniform(0, 1, ishape).astype(dtype) + w1_data = np.random.uniform(0, 1, wshape).astype(dtype) + + inputs = OrderedDict([("data", i_data), ("weight", w1_data)]) + + output_list = generate_ref_data(mod, inputs) + compile_and_run( + AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list), + interface_api, + use_unpacked_api, + use_calculated_workspaces, + ) @parametrize_aot_options @@ -191,9 +162,7 @@ def test_concatenate(interface_api, use_unpacked_api, use_calculated_workspaces) output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -215,9 +184,7 @@ def test_nested_tuples(interface_api, use_unpacked_api, use_calculated_workspace output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -228,12 +195,9 @@ def test_nested_tuples(interface_api, use_unpacked_api, use_calculated_workspace def test_tuple_getitem(interface_api, use_unpacked_api, use_calculated_workspaces): func = relay.Function([], relay.TupleGetItem(relay.Tuple([relay.const(1), relay.const(2)]), 0)) output_list = generate_ref_data(func, {}) - inputs = {} compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs={}, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -249,9 +213,7 @@ def test_id(interface_api, use_unpacked_api, use_calculated_workspaces): output_list = generate_ref_data(ident, inputs) compile_and_run( - ident, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(ident), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -263,12 +225,9 @@ def test_add_const(interface_api, use_unpacked_api, use_calculated_workspaces): two = relay.add(relay.const(1), relay.const(1)) func = relay.Function([], two) output_list = generate_ref_data(func, {}) - inputs = {} compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs={}, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -287,9 +246,7 @@ def test_mul_param(interface_api, use_unpacked_api, use_calculated_workspaces): output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -305,9 +262,7 @@ def test_subtract(interface_api, use_unpacked_api, use_calculated_workspaces): inputs = {"i": i_data} output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - 
output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -326,9 +281,7 @@ def test_tuple_output(interface_api, use_unpacked_api, use_calculated_workspaces inputs = {"x": x_data} output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -348,13 +301,10 @@ def test_mobilenet(use_calculated_workspaces, workspace_byte_alignment): inputs = {"data": data} output_list = generate_ref_data(mod, inputs, params) compile_and_run( - mod, - inputs, - output_list, + AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, - params, workspace_byte_alignment, ) @@ -407,13 +357,10 @@ def test_byoc_microtvm(use_calculated_workspaces): input_list = [map_inputs["x"]] input_list.extend([map_inputs["w{}".format(i)] for i in range(8)]) compile_and_run( - mod, - map_inputs, - output_list, + AOTTestNetwork(name="my_mod", module=mod, inputs=map_inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, - mod_name="my_mod", ) @@ -432,14 +379,12 @@ def test_add_name_mangling_with_params(interface_api, use_unpacked_api, use_calc output_list = generate_ref_data(func, inputs, params) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork( + name="my_mod", module=func, inputs=inputs, outputs=output_list, params=params + ), interface_api, use_unpacked_api, use_calculated_workspaces, - params=params, - mod_name="my_mod", ) @@ -481,19 +426,18 @@ def @main(%data : Tensor[(1, 3, 64, 64), uint8], %weight : Tensor[(8, 3, 5, 5), inputs2 = {"data": input_data} output_list2 = generate_ref_data(mod2, inputs2, params2) - input_list_map = {"mod1": inputs1, "mod2": inputs2} - output_list_map = {"mod1": output_list1, "mod2": output_list2} - mod_map = {"mod1": mod1, "mod2": mod2} - param_map = {"mod1": params1, "mod2": params2} - - compile_and_run_multiple_models( - mod_map, - input_list_map, - output_list_map, + compile_and_run( + [ + AOTTestNetwork( + name="mod1", module=mod1, inputs=inputs1, outputs=output_list1, params=params1 + ), + AOTTestNetwork( + name="mod2", module=mod2, inputs=inputs2, outputs=output_list2, params=params2 + ), + ], interface_api, use_unpacked_api, use_calculated_workspaces, - param_map, ) @@ -523,13 +467,10 @@ def test_quant_mobilenet_tfl(): inputs = {"input": data} output_list = generate_ref_data(mod, inputs, params) compile_and_run( - mod, - inputs, - output_list, + AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, - params=params, ) @@ -553,9 +494,7 @@ def test_transpose(interface_api, use_unpacked_api, use_calculated_workspaces): inputs = {"x": x_data, "y": y_data, "z": t_data} output_list = generate_ref_data(func, inputs) compile_and_run( - func, - inputs, - output_list, + AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, diff --git a/tests/python/relay/test_external_codegen.py b/tests/python/relay/test_external_codegen.py index 645f86fadac0..8b82f4931405 100644 --- a/tests/python/relay/test_external_codegen.py +++ b/tests/python/relay/test_external_codegen.py @@ -27,7 +27,7 @@ from tvm.contrib import utils from 
tvm.relay.build_module import bind_params_by_name from tvm.relay.op.annotation import compiler_begin, compiler_end -from aot.aot_test_utils import compile_and_run +from aot.aot_test_utils import AOTTestNetwork, compile_and_run def update_lib(lib): @@ -77,8 +77,15 @@ def check_graph_executor_result( def check_aot_executor_result( mod, map_inputs, out_shape, result, tol=1e-5, target="llvm", device=tvm.cpu() ): + interface_api = "packed" + use_unpacked_api = False use_calculated_workspaces = True - compile_and_run(mod, map_inputs, [result], "packed", 0, use_calculated_workspaces) + compile_and_run( + AOTTestNetwork(module=mod, inputs=map_inputs, outputs=[result]), + interface_api, + use_unpacked_api, + use_calculated_workspaces, + ) def set_external_func_attr(func, compiler, ext_symbol): From 2ed2ea8790f95ce3e0a7f230da80000bf920f5d3 Mon Sep 17 00:00:00 2001 From: Chris Sidebottom Date: Tue, 10 Aug 2021 15:53:39 +0100 Subject: [PATCH 2/2] Rename Network -> Model and sizebytes -> size_bytes --- tests/python/relay/aot/aot_test_utils.py | 70 ++++++++++----------- tests/python/relay/aot/test_crt_aot.py | 42 ++++++------- tests/python/relay/test_external_codegen.py | 4 +- 3 files changed, 57 insertions(+), 59 deletions(-) diff --git a/tests/python/relay/aot/aot_test_utils.py b/tests/python/relay/aot/aot_test_utils.py index 2221e9ad7ad4..36c415ec8c83 100644 --- a/tests/python/relay/aot/aot_test_utils.py +++ b/tests/python/relay/aot/aot_test_utils.py @@ -40,8 +40,8 @@ _LOG = logging.getLogger(__name__) -class AOTTestNetwork(NamedTuple): - """Class to describe a network under test +class AOTTestModel(NamedTuple): + """Class to describe a model under test Parameters ---------- @@ -52,7 +52,7 @@ class AOTTestNetwork(NamedTuple): outputs: List[np.array] Ordered list of output value arrays name: str - Name to use for this network + Name to use for this model params: Optional[Dict[str, np.array]] Dict of parameter names to value arrays """ @@ -328,7 +328,7 @@ def emit_main_micro_include(main_file, mod_name): main_file.write(f"#include <{mangle_module_name(mod_name)}.h>\n") -def create_main(test_name, networks, output_path, interface_api, workspace_bytes): +def create_main(test_name, models, output_path, interface_api, workspace_bytes): file_path = pathlib.Path(f"{output_path}/" + test_name).resolve() # create header file raw_path = file_path.with_suffix(".c").resolve() @@ -336,26 +336,26 @@ def create_main(test_name, networks, output_path, interface_api, workspace_bytes emit_main_common_includes(main_file) if interface_api == "c": - for network in networks: - emit_main_micro_include(main_file, network.name) + for model in models: + emit_main_micro_include(main_file, model.name) emit_main_prologue(main_file, workspace_bytes) - for network in networks: - emit_main_data(main_file, network.inputs, network.outputs, network.name) + for model in models: + emit_main_data(main_file, model.inputs, model.outputs, model.name) emit_main_init_memory_manager(main_file) if interface_api == "c": - for network in networks: - emit_main_data_structs(main_file, network.inputs, network.outputs, network.name) - emit_main_c_interface_call(main_file, network.name) + for model in models: + emit_main_data_structs(main_file, model.inputs, model.outputs, model.name) + emit_main_c_interface_call(main_file, model.name) else: emit_main_fake_packed_values(main_file) - for network in networks: - emit_main_data_setup(main_file, network.inputs, network.outputs, network.name) - emit_main_packed_call(main_file, network.inputs, network.outputs, 
network.name) + for model in models: + emit_main_data_setup(main_file, model.inputs, model.outputs, model.name) + emit_main_packed_call(main_file, model.inputs, model.outputs, model.name) - for network in networks: - emit_main_compare(main_file, network.outputs, network.name) + for model in models: + emit_main_compare(main_file, model.outputs, model.name) emit_main_epilogue(main_file) @@ -388,14 +388,14 @@ def create_header_file(tensor_name, npy_data, output_path): header_file.write("};\n\n") -def extract_main_workspace_sizebytes(extract_dir): +def extract_main_workspace_size_bytes(extract_dir): with open(os.path.join(extract_dir, "metadata.json")) as json_f: metadata = json.load(json_f) return metadata["memory"]["functions"]["main"][0]["workspace_size_bytes"] def compile_and_run( - networks: Union[List[AOTTestNetwork], AOTTestNetwork], + models: Union[List[AOTTestModel], AOTTestModel], interface_api, use_unpacked_api, use_calculated_workspaces, @@ -410,8 +410,8 @@ def compile_and_run( target = f"{base_target} {extra_target}" cflags = f"-DTVM_RUNTIME_ALLOC_ALIGNMENT_BYTES={workspace_byte_alignment} " - if not isinstance(networks, list): - networks = [networks] + if not isinstance(models, list): + models = [models] # The calculated workspaces will not account for stack allocator tags used for debugging if not use_calculated_workspaces: @@ -437,48 +437,48 @@ def compile_and_run( ) workspace_bytes = 0 - for network in networks: + for model in models: with tvm.transform.PassContext(opt_level=3, config=config): lib = tvm.relay.build( - network.module, + model.module, target, target_host=target, - params=network.params, - mod_name=network.name, + params=model.params, + mod_name=model.name, ) - tar_file = os.path.join(base_path, f"{network.name}.tar") + tar_file = os.path.join(base_path, f"{model.name}.tar") export_model_library_format(lib, tar_file) t = tarfile.open(tar_file) t.extractall(base_path) if use_calculated_workspaces: - workspace_bytes += extract_main_workspace_sizebytes(base_path) + workspace_bytes += extract_main_workspace_size_bytes(base_path) else: workspace_bytes += 16384 * 1024 - for key in network.inputs: + for key in model.inputs: create_header_file( - f'{mangle_name(network.name, "input_data")}_{key}', - network.inputs[key], + f'{mangle_name(model.name, "input_data")}_{key}', + model.inputs[key], include_path, ) - for i in range(len(network.outputs)): + for i in range(len(model.outputs)): create_header_file( - (f'{mangle_name(network.name,"output_data")}{i}'), - np.zeros(network.outputs[i].shape, network.outputs[i].dtype), + (f'{mangle_name(model.name,"output_data")}{i}'), + np.zeros(model.outputs[i].shape, model.outputs[i].dtype), include_path, ) create_header_file( - (f'{mangle_name(network.name, "expected_output_data")}{i}'), - network.outputs[i], + (f'{mangle_name(model.name, "expected_output_data")}{i}'), + model.outputs[i], include_path, ) create_main( "test.c", - networks, + models, build_path, interface_api, workspace_bytes, diff --git a/tests/python/relay/aot/test_crt_aot.py b/tests/python/relay/aot/test_crt_aot.py index 3167cb4856f9..abbf350bff77 100644 --- a/tests/python/relay/aot/test_crt_aot.py +++ b/tests/python/relay/aot/test_crt_aot.py @@ -27,7 +27,7 @@ from tvm.relay import testing, transform from tvm.relay.testing import byoc from aot_test_utils import ( - AOTTestNetwork, + AOTTestModel, generate_ref_data, convert_to_relay, compile_and_run, @@ -45,7 +45,7 @@ def test_error_c_interface_with_packed_api(): with pytest.raises(tvm.TVMError, match="Packed 
interface required for packed operators"): compile_and_run( - AOTTestNetwork( + AOTTestModel( module=IRModule.from_expr(func), inputs={}, outputs=generate_ref_data(func, {}) ), interface_api, @@ -84,7 +84,7 @@ def @main(%data : Tensor[(1, 3, 64, 64), uint8], %weight : Tensor[(8, 3, 5, 5), output_list = generate_ref_data(mod, inputs, params) compile_and_run( - AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), + AOTTestModel(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -106,7 +106,7 @@ def test_add_with_params(interface_api, use_unpacked_api, use_calculated_workspa output_list = generate_ref_data(func, inputs, params) compile_and_run( - AOTTestNetwork( + AOTTestModel( module=IRModule.from_expr(func), inputs=inputs, outputs=output_list, params=params ), interface_api, @@ -138,7 +138,7 @@ def test_conv2d(use_calculated_workspaces, interface_api, use_unpacked_api, grou output_list = generate_ref_data(mod, inputs) compile_and_run( - AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list), + AOTTestModel(module=mod, inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -162,7 +162,7 @@ def test_concatenate(interface_api, use_unpacked_api, use_calculated_workspaces) output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -184,7 +184,7 @@ def test_nested_tuples(interface_api, use_unpacked_api, use_calculated_workspace output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -197,7 +197,7 @@ def test_tuple_getitem(interface_api, use_unpacked_api, use_calculated_workspace output_list = generate_ref_data(func, {}) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs={}, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs={}, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -213,7 +213,7 @@ def test_id(interface_api, use_unpacked_api, use_calculated_workspaces): output_list = generate_ref_data(ident, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(ident), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(ident), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -227,7 +227,7 @@ def test_add_const(interface_api, use_unpacked_api, use_calculated_workspaces): output_list = generate_ref_data(func, {}) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs={}, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs={}, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -246,7 +246,7 @@ def test_mul_param(interface_api, use_unpacked_api, use_calculated_workspaces): output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, 
use_unpacked_api, use_calculated_workspaces, @@ -262,7 +262,7 @@ def test_subtract(interface_api, use_unpacked_api, use_calculated_workspaces): inputs = {"i": i_data} output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -281,7 +281,7 @@ def test_tuple_output(interface_api, use_unpacked_api, use_calculated_workspaces inputs = {"x": x_data} output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -301,7 +301,7 @@ def test_mobilenet(use_calculated_workspaces, workspace_byte_alignment): inputs = {"data": data} output_list = generate_ref_data(mod, inputs, params) compile_and_run( - AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), + AOTTestModel(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -357,7 +357,7 @@ def test_byoc_microtvm(use_calculated_workspaces): input_list = [map_inputs["x"]] input_list.extend([map_inputs["w{}".format(i)] for i in range(8)]) compile_and_run( - AOTTestNetwork(name="my_mod", module=mod, inputs=map_inputs, outputs=output_list), + AOTTestModel(name="my_mod", module=mod, inputs=map_inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -379,9 +379,7 @@ def test_add_name_mangling_with_params(interface_api, use_unpacked_api, use_calc output_list = generate_ref_data(func, inputs, params) compile_and_run( - AOTTestNetwork( - name="my_mod", module=func, inputs=inputs, outputs=output_list, params=params - ), + AOTTestModel(name="my_mod", module=func, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -428,10 +426,10 @@ def @main(%data : Tensor[(1, 3, 64, 64), uint8], %weight : Tensor[(8, 3, 5, 5), compile_and_run( [ - AOTTestNetwork( + AOTTestModel( name="mod1", module=mod1, inputs=inputs1, outputs=output_list1, params=params1 ), - AOTTestNetwork( + AOTTestModel( name="mod2", module=mod2, inputs=inputs2, outputs=output_list2, params=params2 ), ], @@ -467,7 +465,7 @@ def test_quant_mobilenet_tfl(): inputs = {"input": data} output_list = generate_ref_data(mod, inputs, params) compile_and_run( - AOTTestNetwork(module=mod, inputs=inputs, outputs=output_list, params=params), + AOTTestModel(module=mod, inputs=inputs, outputs=output_list, params=params), interface_api, use_unpacked_api, use_calculated_workspaces, @@ -494,7 +492,7 @@ def test_transpose(interface_api, use_unpacked_api, use_calculated_workspaces): inputs = {"x": x_data, "y": y_data, "z": t_data} output_list = generate_ref_data(func, inputs) compile_and_run( - AOTTestNetwork(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), + AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list), interface_api, use_unpacked_api, use_calculated_workspaces, diff --git a/tests/python/relay/test_external_codegen.py b/tests/python/relay/test_external_codegen.py index 8b82f4931405..69e556791e5b 100644 --- a/tests/python/relay/test_external_codegen.py +++ b/tests/python/relay/test_external_codegen.py @@ -27,7 
+27,7 @@ from tvm.contrib import utils from tvm.relay.build_module import bind_params_by_name from tvm.relay.op.annotation import compiler_begin, compiler_end -from aot.aot_test_utils import AOTTestNetwork, compile_and_run +from aot.aot_test_utils import AOTTestModel, compile_and_run def update_lib(lib): @@ -81,7 +81,7 @@ def check_aot_executor_result( use_unpacked_api = False use_calculated_workspaces = True compile_and_run( - AOTTestNetwork(module=mod, inputs=map_inputs, outputs=[result]), + AOTTestModel(module=mod, inputs=map_inputs, outputs=[result]), interface_api, use_unpacked_api, use_calculated_workspaces,
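
For reference, a minimal usage sketch of the refactored helpers after both patches. The relay function, shapes, and data below are illustrative only (not taken from the patches), and running it assumes the TVM test environment these tests use (standalone CRT sources and a C toolchain on the path).

# Illustrative sketch: driving the refactored AOT test utilities with a single
# AOTTestModel; a list of AOTTestModel instances can be passed instead to test
# several modules in one runner, as test_multiple_models does above.
import numpy as np
from tvm import relay
from tvm.ir.module import IRModule
from aot_test_utils import AOTTestModel, compile_and_run, generate_ref_data

# Build a small example module: elementwise addition of two inputs.
x = relay.var("x", shape=(1, 10), dtype="float32")
y = relay.var("y", shape=(1, 10), dtype="float32")
func = relay.Function([x, y], relay.add(x, y))

# Reference inputs and expected outputs for the generated main() to compare against.
inputs = {
    "x": np.random.uniform(size=(1, 10)).astype("float32"),
    "y": np.random.uniform(size=(1, 10)).astype("float32"),
}
output_list = generate_ref_data(func, inputs)

# Compile the module with the AOT executor, generate the test harness and run it.
compile_and_run(
    AOTTestModel(module=IRModule.from_expr(func), inputs=inputs, outputs=output_list),
    interface_api="c",
    use_unpacked_api=True,
    use_calculated_workspaces=True,
)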