21 changes: 19 additions & 2 deletions python/tvm/driver/tvmc/compiler.py
@@ -137,6 +137,11 @@ def add_compile_parser(subparsers, _):
type=parse_pass_list_str,
default="",
)
parser.add_argument(
"--module-name",
default="default",
help="The output module name. Defaults to 'default'.",
)


def drive_compile(args):
@@ -179,6 +184,7 @@ def drive_compile(args):
disabled_pass=args.disabled_pass,
pass_context_configs=args.pass_config,
additional_target_options=reconstruct_target_args(args),
mod_name=args.module_name,
)

return 0
@@ -202,6 +208,7 @@ def compile_model(
pass_context_configs: Optional[List[str]] = None,
additional_target_options: Optional[Dict[str, Dict[str, Any]]] = None,
use_vm: bool = False,
mod_name: Optional[str] = "default",
Review comment from @areusch (Contributor): perhaps "default" should be a constant somewhere?

Reply from the author (Contributor): Thanks @areusch, I agree it would be good to have "default" as a constant. It feels like this should be a separate PR though.

):
"""Compile a model from a supported framework into a TVM module.

@@ -251,6 +258,8 @@ def compile_model(
Additional target options in a dictionary to combine with initial Target arguments
use_vm: bool
Whether to use the VM to compile the model as opposed to the graph executor
mod_name: str, optional
The module name

Returns
-------
@@ -275,7 +284,7 @@
if codegen["config_key"] is not None:
config[codegen["config_key"]] = codegen_from_cli["opts"]
with tvm.transform.PassContext(config=config):
mod = partition_function(mod, params, **codegen_from_cli["opts"])
mod = partition_function(mod, params, mod_name=mod_name, **codegen_from_cli["opts"])

if tuning_records and os.path.exists(tuning_records):
logger.debug("tuning records file provided: %s", tuning_records)
@@ -300,6 +309,7 @@
runtime=runtime,
params=params,
use_vm=use_vm,
mod_name=mod_name,
)
else:
with autotvm.apply_history_best(tuning_records):
@@ -314,6 +324,7 @@
runtime=runtime,
params=params,
use_vm=use_vm,
mod_name=mod_name,
)
else:
with tvm.transform.PassContext(
@@ -327,6 +338,7 @@
runtime=runtime,
params=params,
use_vm=use_vm,
mod_name=mod_name,
)

# Generate output dump files with sources
@@ -364,6 +376,7 @@ def build(
runtime: Runtime,
params: Dict[str, tvm.nd.NDArray],
use_vm: bool,
mod_name: str,
):
"""
Builds the model with the provided executor.
@@ -383,13 +396,17 @@
A parameter dictionary for the model.
use_vm: bool
Whether to use the VM to compile the model as opposed to the graph executor
mod_name: str
The module name

"""
if use_vm:
logger.debug("building with vm compile")
return relay.vm.compile(mod, target=tvm_target, params=params)
logger.debug("building with relay build")
return relay.build(mod, target=tvm_target, executor=executor, runtime=runtime, params=params)
return relay.build(
mod, target=tvm_target, executor=executor, runtime=runtime, params=params, mod_name=mod_name
)


def save_dumps(module_name: str, dumps: Dict[str, str], dump_root: str = "."):
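The new `mod_name` plumbing can also be exercised through the tvmc Python API; the tests added below follow the same pattern. A minimal sketch, assuming the model path is a placeholder and the example module name "classify" is arbitrary:

```python
from tvm.driver import tvmc
from tvm.relay.backend import Executor, Runtime

# "sine_model.tflite" is a placeholder path; any model tvmc.load() accepts works.
tvmc_model = tvmc.load("sine_model.tflite")

# Passing mod_name renames the generated module, so the MLF package emits
# classify_lib*.c sources and tvmgen_classify_* symbols instead of the
# tvmgen_default_* ones.
tvmc.compiler.compile_model(
    tvmc_model,
    target="c -mcpu=cortex-m55",
    runtime=Runtime("crt", {"system-lib": True}),
    executor=Executor("aot"),
    output_format="mlf",
    package_path="module.tar",
    pass_context_configs=["tir.disable_vectorize=true"],
    mod_name="classify",
)
```

On the review note above about hoisting "default" into a constant: that was deferred to a follow-up, but it would amount to something like a module-level `DEFAULT_MODULE_NAME = "default"` (hypothetical name) shared by the argparse default and the `mod_name` fallback.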
2 changes: 1 addition & 1 deletion python/tvm/driver/tvmc/model.py
@@ -393,7 +393,7 @@ def import_package(self, package_path: str):

has_graph_executor = "graph" in metadata["executors"]
graph = temp.relpath("executor-config/graph/graph.json") if has_graph_executor else None
params = temp.relpath("parameters/default.params")
params = temp.relpath(f'parameters/{metadata["model_name"]}.params')

self.type = "mlf"
else:
6 changes: 4 additions & 2 deletions python/tvm/relay/op/contrib/cmsisnn.py
@@ -31,7 +31,7 @@ def enabled():
return "cmsis-nn" in Target.list_kinds()


def partition_for_cmsisnn(mod, params=None, **opts):
def partition_for_cmsisnn(mod, params=None, mod_name="default", **opts):
"""Partition the graph greedily offloading supported
operators on Cortex-M using CMSIS-NN

@@ -41,6 +48,8 @@ def partition_for_cmsisnn(mod, params=None, **opts):
The module to run passes on.
params : Optional[Dict[str, NDArray]]
Constant input parameters.
mod_name: str, optional
The module name

Returns
-------
@@ -55,7 +57,7 @@
transform.InferType(),
transform.MergeComposite(pattern_table()),
transform.AnnotateTarget("cmsis-nn"),
transform.PartitionGraph(),
transform.PartitionGraph(mod_name=mod_name),
GenerateCMSISNNConstants(),
ScalarToTensorConstants(),
ExtractConstantsFromPartitionedFunction(),
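For illustration, the partitioner can now be handed the module name directly. A minimal sketch, assuming a TVM build with CMSIS-NN support enabled; the toy module and the name "classify" are placeholders standing in for a real quantized network from a frontend such as `relay.frontend.from_tflite`:

```python
import tvm
from tvm import relay
from tvm.relay.op.contrib.cmsisnn import partition_for_cmsisnn

# Toy Relay module; with no CMSIS-NN-supported patterns the partition is
# effectively a no-op, but it shows how mod_name is threaded through to
# transform.PartitionGraph(mod_name=...), which prefixes offloaded functions
# with the chosen module name instead of "default".
x = relay.var("x", shape=(1, 8), dtype="int8")
mod = tvm.IRModule.from_expr(relay.Function([x], relay.nn.relu(x)))

partitioned = partition_for_cmsisnn(mod, params=None, mod_name="classify")
```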
9 changes: 7 additions & 2 deletions python/tvm/relay/op/contrib/ethosu.py
@@ -1767,7 +1767,10 @@ def pattern_table() -> List[Tuple[str, tvm.relay.dataflow_pattern.DFPattern, Cal
# pylint: disable=unused-argument
@requires_vela
def partition_for_ethosu(
mod: tvm.ir.IRModule, params: Optional[Dict[str, tvm.runtime.NDArray]] = None, **opts
mod: tvm.ir.IRModule,
params: Optional[Dict[str, tvm.runtime.NDArray]] = None,
mod_name: str = "default",
**opts,
):
"""This helper function partition the relay graph as produced by the
relay frontend for a given model into external functions
@@ -1779,6 +1782,8 @@ def partition_for_ethosu(
The IRModule that gets generated from a relay frontend
params : Optional[Dict[str, tvm.runtime.NDArray]]
Constant input parameters.
mod_name: str, optional
The module name

Returns
-------
@@ -1796,7 +1801,7 @@
mod = relay.transform.AnnotateTarget("ethos-u")(mod)
mod = relay.transform.MergeCompilerRegions()(mod)
mod = relay.transform.InferType()(mod)
mod = relay.transform.PartitionGraph()(mod)
mod = relay.transform.PartitionGraph(mod_name)(mod)
mod = relay.transform.InferType()(mod)
mod = preprocess.preprocess_ext_io()(mod)
return mod
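Likewise for the microNPU flow. A minimal sketch, assuming an Ethos-U-enabled TVM build with Vela installed, and a `mod`/`params` pair produced by a TFLite frontend; "classify" is again just an example name:

```python
from tvm.relay.op.contrib.ethosu import partition_for_ethosu

# mod and params are assumed to come from relay.frontend.from_tflite on a
# quantized model. The mod_name argument reaches PartitionGraph(mod_name), so
# offloaded functions end up named tvmgen_classify_ethos_u_main_* downstream,
# as the new test below asserts.
partitioned_mod = partition_for_ethosu(mod, params, mod_name="classify")
```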
148 changes: 148 additions & 0 deletions tests/python/driver/tvmc/test_compiler.py
@@ -552,6 +552,154 @@ def test_compile_check_configs_composite_target(mock_pkg, mock_pc, mock_fe, mock
)


def test_compile_tflite_module_with_mod_name(tmpdir_factory, tflite_cnn_s_quantized):
pytest.importorskip("tflite")

output_dir = tmpdir_factory.mktemp("mlf")
tvmc_model = tvmc.load(tflite_cnn_s_quantized)

output_file_name = f"{output_dir}/file.tar"

tvmc.compiler.compile_model(
tvmc_model,
target="c -mcpu=cortex-m55",
runtime=Runtime("crt", {"system-lib": True}),
executor=Executor("aot"),
output_format="mlf",
package_path=output_file_name,
pass_context_configs=["tir.disable_vectorize=true"],
mod_name="classify",
)

# check that an MLF package was created
assert os.path.exists(output_file_name)

with tarfile.open(output_file_name) as mlf_package:
# check that the C source files have been named classify_lib*.c
c_source_files = [
name
for name in mlf_package.getnames()
if re.match(r"\./codegen/host/src/classify_lib\d+\.c", name)
]
assert len(c_source_files) > 0

# check that "default" doesn't occur in any of the C source files
# check that function names are of the form "tvmgen_classify_*"
for file_name in c_source_files:
with mlf_package.extractfile(file_name) as f:
content = f.read()
assert b"default" not in content
assert b"tvmgen_classify_" in content

# check that tvmgen_classify_run() function exists
with mlf_package.extractfile("./codegen/host/src/classify_lib0.c") as f:
content = f.read()
assert b"tvmgen_classify_run(" in content


@tvm.testing.requires_cmsisnn
def test_compile_tflite_module_with_mod_name_and_cmsisnn(tmpdir_factory, tflite_cnn_s_quantized):
pytest.importorskip("tflite")

output_dir = tmpdir_factory.mktemp("mlf")
tvmc_model = tvmc.load(tflite_cnn_s_quantized)

output_file_name = f"{output_dir}/file.tar"

tvmc.compiler.compile_model(
tvmc_model,
target="cmsis-nn, c -mcpu=cortex-m55",
runtime=Runtime("crt", {"system-lib": True}),
executor=Executor("aot"),
output_format="mlf",
package_path=output_file_name,
pass_context_configs=["tir.disable_vectorize=true"],
mod_name="classify",
)

# check that an MLF package was created
assert os.path.exists(output_file_name)

with tarfile.open(output_file_name) as mlf_package:
# check that the C source files have been named classify_lib*.c
c_source_files = [
name
for name in mlf_package.getnames()
if re.match(r"\./codegen/host/src/classify_lib\d+\.c", name)
]
assert len(c_source_files) > 0

# check that "default" doesn't occur in any of the C source files
# check that function names are of the form "tvmgen_classify_*"
for file_name in c_source_files:
with mlf_package.extractfile(file_name) as f:
content = f.read()
assert b"default" not in content
assert b"tvmgen_classify_" in content

# check that tvmgen_classify_run() function exists
with mlf_package.extractfile("./codegen/host/src/classify_lib0.c") as f:
content = f.read()
assert b"tvmgen_classify_run(" in content

# check that CMSIS-NN function names are of the form "tvmgen_classify_cmsis_nn_main_*"
with mlf_package.extractfile("./codegen/host/src/classify_lib2.c") as f:
content = f.read()
assert b"tvmgen_classify_cmsis_nn_main_" in content


def test_compile_tflite_module_with_mod_name_and_ethosu(
tmpdir_factory, tflite_mobilenet_v1_1_quant
):
pytest.importorskip("tflite")
pytest.importorskip("ethosu.vela")

output_dir = tmpdir_factory.mktemp("mlf")
tvmc_model = tvmc.load(tflite_mobilenet_v1_1_quant)
output_file_name = f"{output_dir}/file.tar"

tvmc.compiler.compile_model(
tvmc_model,
target="ethos-u -accelerator_config=ethos-u55-256, c -mcpu=cortex-m55",
runtime=Runtime("crt"),
executor=Executor("aot", {"unpacked-api": True}),
output_format="mlf",
package_path=output_file_name,
pass_context_configs=["tir.disable_vectorize=true"],
mod_name="classify",
)

# check that an MLF package was created
assert os.path.exists(output_file_name)

with tarfile.open(output_file_name) as mlf_package:
# check that the C source files have been named classify_lib*.c
c_source_files = [
name
for name in mlf_package.getnames()
if re.match(r"\./codegen/host/src/classify_lib\d+\.c", name)
]
assert len(c_source_files) > 0

# check that "default" doesn't occur in any of the C source files
# check that function names are of the form "tvmgen_classify_*"
for file_name in c_source_files:
with mlf_package.extractfile(file_name) as f:
content = f.read()
assert b"default" not in content
assert b"tvmgen_classify_" in content

# check that tvmgen_classify_run() function exists
with mlf_package.extractfile("./codegen/host/src/classify_lib0.c") as f:
content = f.read()
assert b"tvmgen_classify_run(" in content

# check that microNPU function names are of the form "tvmgen_classify_ethos_u_main_*"
with mlf_package.extractfile("./codegen/host/src/classify_lib2.c") as f:
content = f.read()
assert b"tvmgen_classify_ethos_u_main_" in content


if __name__ == "__main__":
import sys
