Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
208 changes: 71 additions & 137 deletions tests/python/relay/aot/aot_test_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import shutil
import subprocess
import tarfile
from typing import NamedTuple, Union, Optional, List, Dict

import pytest
import numpy as np
Expand All @@ -39,6 +40,30 @@
_LOG = logging.getLogger(__name__)


class AOTTestModel(NamedTuple):
    """Class to describe a model under test

    Parameters
    ----------
    module: tvm.IRModule
        IRModule to generate AOT executor for
    inputs: Dict[str, np.ndarray]
        Dict of input names to value arrays
    outputs: List[np.ndarray]
        Ordered list of output value arrays
    name: str
        Name to use for this model
    params: Optional[Dict[str, np.ndarray]]
        Dict of parameter names to value arrays
    """

    # NOTE: annotations use np.ndarray (the array *type*); np.array is the
    # constructor function and is not a valid type annotation.
    module: tvm.IRModule
    inputs: Dict[str, np.ndarray]
    outputs: List[np.ndarray]
    name: str = "default"
    params: Optional[Dict[str, np.ndarray]] = None


def mangle_name(mod_name, name):
    """Prefix *name* with the mangled form of *mod_name*, joined by an underscore."""
    return f"{mangle_module_name(mod_name)}_{name}"
Expand Down Expand Up @@ -303,40 +328,34 @@ def emit_main_micro_include(main_file, mod_name):
main_file.write(f"#include <{mangle_module_name(mod_name)}.h>\n")


def create_main(test_name, models, output_path, interface_api, workspace_bytes):
    """Generate the C test harness (`test_name`.c) that feeds inputs to each
    model, invokes it through the chosen interface, and compares outputs.

    Parameters
    ----------
    test_name: str
        Base name of the generated C source file (e.g. "test.c")
    models: List[AOTTestModel]
        Models whose inputs/outputs are emitted into the harness
    output_path: str
        Directory in which to write the generated source
    interface_api: str
        "c" for the typed C interface, anything else for the packed API
    workspace_bytes: int
        Size of the workspace buffer emitted in the prologue
    """
    file_path = pathlib.Path(f"{output_path}/" + test_name).resolve()
    # create header file
    raw_path = file_path.with_suffix(".c").resolve()
    with open(raw_path, "w") as main_file:
        emit_main_common_includes(main_file)

        # The C interface needs one generated header per model.
        if interface_api == "c":
            for model in models:
                emit_main_micro_include(main_file, model.name)

        emit_main_prologue(main_file, workspace_bytes)
        for model in models:
            emit_main_data(main_file, model.inputs, model.outputs, model.name)
        emit_main_init_memory_manager(main_file)

        if interface_api == "c":
            for model in models:
                emit_main_data_structs(main_file, model.inputs, model.outputs, model.name)
                emit_main_c_interface_call(main_file, model.name)
        else:
            # Packed API: set up fake TVMValue plumbing, then call each model.
            emit_main_fake_packed_values(main_file)
            for model in models:
                emit_main_data_setup(main_file, model.inputs, model.outputs, model.name)
                emit_main_packed_call(main_file, model.inputs, model.outputs, model.name)

        # Compare every model's outputs against the expected data, then exit.
        for model in models:
            emit_main_compare(main_file, model.outputs, model.name)
        emit_main_epilogue(main_file)


Expand Down Expand Up @@ -369,22 +388,18 @@ def create_header_file(tensor_name, npy_data, output_path):
header_file.write("};\n\n")


def extract_main_workspace_size_bytes(extract_dir):
    """Return the "main" function's workspace size (in bytes) recorded in the
    Model Library Format ``metadata.json`` found under *extract_dir*.

    Parameters
    ----------
    extract_dir: str
        Directory into which a Model Library Format archive was extracted
    """
    with open(os.path.join(extract_dir, "metadata.json")) as json_f:
        metadata = json.load(json_f)
    # First entry of the "main" function list carries the aggregate workspace size.
    return metadata["memory"]["functions"]["main"][0]["workspace_size_bytes"]


def compile_and_run(
mod,
inputs,
output_list,
models: Union[List[AOTTestModel], AOTTestModel],
interface_api,
use_unpacked_api,
use_calculated_workspaces,
params=None,
workspace_byte_alignment=8,
mod_name="default",
enable_op_fusion=True,
):
"""
Expand All @@ -395,6 +410,9 @@ def compile_and_run(
target = f"{base_target} {extra_target}"
cflags = f"-DTVM_RUNTIME_ALLOC_ALIGNMENT_BYTES={workspace_byte_alignment} "

if not isinstance(models, list):
models = [models]

# The calculated workspaces will not account for stack allocator tags used for debugging
if not use_calculated_workspaces:
cflags += "-DTVM_CRT_STACK_ALLOCATOR_ENABLE_LIFO_CHECK "
Expand All @@ -403,25 +421,13 @@ def compile_and_run(
if not enable_op_fusion:
config["relay.FuseOps.max_depth"] = 1

with tvm.transform.PassContext(opt_level=3, config=config):
lib = tvm.relay.build(mod, target, target_host=target, params=params, mod_name=mod_name)

tmp_path = utils.tempdir()
tmp_dir = tmp_path.temp_dir

base_path = os.path.join(tmp_dir, "test")
build_path = os.path.join(base_path, "build")
os.makedirs(build_path, exist_ok=True)

tar_file = os.path.join(base_path, "test.tar")
export_model_library_format(lib, tar_file)
t = tarfile.open(tar_file)
t.extractall(base_path)
if use_calculated_workspaces:
workspace_bytes = extract_main_workspace_sizebytes(base_path)
else:
workspace_bytes = 16384 * 1024

include_path = os.path.join(base_path, "include")
os.mkdir(include_path)
crt_root = tvm.micro.get_standalone_crt_dir()
Expand All @@ -430,132 +436,60 @@ def compile_and_run(
os.path.join(include_path, "crt_config.h"),
)

for key in inputs:
create_header_file(
f'{mangle_name(mod_name, "input_data")}_{key}',
inputs[key],
os.path.join(base_path, "include"),
)

for i in range(len(output_list)):
create_header_file(
f'{mangle_name(mod_name,"output_data")}{i}',
np.zeros(output_list[i].shape, output_list[i].dtype),
os.path.join(base_path, "include"),
)
create_header_file(
f'{mangle_name(mod_name, "expected_output_data")}{i}',
output_list[i],
os.path.join(base_path, "include"),
)

create_main(
"test.c",
{mod_name: inputs},
{mod_name: output_list},
build_path,
interface_api,
workspace_bytes,
)

# Verify that compiles fine
file_dir = os.path.dirname(os.path.abspath(__file__))
codegen_path = os.path.join(base_path, "codegen")
makefile = os.path.join(file_dir, "aot_test.mk")
make_cmd = (
f"make CFLAGS='{cflags}' -f {makefile} build_dir="
+ build_path
+ f" TVM_ROOT={file_dir}/../../../.."
+ f" CODEGEN_ROOT={codegen_path}"
+ f" STANDALONE_CRT_DIR={tvm.micro.get_standalone_crt_dir()}"
)

compile_log_path = os.path.join(build_path, "test_compile.log")
ret = subprocess_log_output(make_cmd, ".", compile_log_path)
assert ret == 0

# Verify that runs fine
run_log_path = os.path.join(build_path, "test_run.log")
ret = subprocess_log_output("./aot_test_runner", build_path, run_log_path)
assert ret == 0


def compile_and_run_multiple_models(
mod_map,
input_list_map,
output_list_map,
interface_api,
use_unpacked_api,
use_calculated_workspaces,
param_map,
workspace_byte_alignment=8,
):
"""
This method verifies the generated source
"""
base_target = "c -runtime=c --link-params --executor=aot"
extra_target = f"--workspace-byte-alignment={workspace_byte_alignment} --interface-api={interface_api} --unpacked-api={int(use_unpacked_api)}"
target = f"{base_target} {extra_target}"
tmp_path = utils.tempdir()
tmp_dir = tmp_path.temp_dir

base_path = os.path.join(tmp_dir, "test")
build_path = os.path.join(base_path, "build")
os.makedirs(build_path, exist_ok=True)

include_path = os.path.join(base_path, "include")
os.mkdir(include_path)
crt_root = tvm.micro.get_standalone_crt_dir()
shutil.copy2(
os.path.join(crt_root, "template", "crt_config-template.h"),
os.path.join(include_path, "crt_config.h"),
)

for mod_name, mod in mod_map.items():

with tvm.transform.PassContext(opt_level=3, config={"tir.disable_vectorize": True}):
workspace_bytes = 0
for model in models:
with tvm.transform.PassContext(opt_level=3, config=config):
lib = tvm.relay.build(
mod, target, target_host=target, params=param_map[mod_name], mod_name=mod_name
model.module,
target,
target_host=target,
params=model.params,
mod_name=model.name,
)

tar_file = os.path.join(base_path, "test.tar")
tar_file = os.path.join(base_path, f"{model.name}.tar")
export_model_library_format(lib, tar_file)
t = tarfile.open(tar_file)
t.extractall(base_path)

input_list = input_list_map[mod_name]
output_list = output_list_map[mod_name]
if use_calculated_workspaces:
workspace_bytes += extract_main_workspace_size_bytes(base_path)
else:
workspace_bytes += 16384 * 1024

for key in input_list:
for key in model.inputs:
create_header_file(
(f'{mangle_name(mod_name,"input_data")}_{key}'), input_list[key], build_path
f'{mangle_name(model.name, "input_data")}_{key}',
model.inputs[key],
include_path,
)

for i in range(len(output_list_map[mod_name])):
for i in range(len(model.outputs)):
create_header_file(
(f'{mangle_name(mod_name,"output_data")}{i}'),
np.zeros(output_list[i].shape, output_list[i].dtype),
build_path,
(f'{mangle_name(model.name,"output_data")}{i}'),
np.zeros(model.outputs[i].shape, model.outputs[i].dtype),
include_path,
)
create_header_file(
(f'{mangle_name(mod_name,"expected_output_data")}{i}'), output_list[i], build_path
(f'{mangle_name(model.name, "expected_output_data")}{i}'),
model.outputs[i],
include_path,
)

create_main(
"test.c",
input_list_map,
output_list_map,
models,
build_path,
interface_api,
workspace_bytes=16384 * 1024,
workspace_bytes,
)

# Verify that compiles fine
file_dir = os.path.dirname(os.path.abspath(__file__))
codegen_path = os.path.join(base_path, "codegen")
makefile = os.path.join(file_dir, "aot_test.mk")
make_cmd = (
f"make -f {makefile} build_dir="
f"make CFLAGS='{cflags}' -f {makefile} build_dir="
+ build_path
+ f" TVM_ROOT={file_dir}/../../../.."
+ f" CODEGEN_ROOT={codegen_path}"
Expand Down
Loading