Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions python/tvm/testing/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from .utils import known_failing_targets, requires_cuda, requires_cudagraph
from .utils import requires_gpu, requires_llvm, requires_rocm, requires_rpc
from .utils import requires_tensorcore, requires_metal, requires_micro, requires_opencl
from .utils import requires_package
from .utils import identity_after, terminate_self

from ._ffi_api import nop, echo, device_test, run_check_signal, object_use_count
Expand Down
44 changes: 44 additions & 0 deletions python/tvm/testing/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -774,7 +774,51 @@ def requires_rpc(*args):
return _compose(args, _requires_rpc)


def requires_package(*packages):
    """Mark a test as requiring python packages to run.

    If the packages listed are not available, tests marked with
    `requires_package` will appear in the pytest results as being skipped.
    This is equivalent to using ``foo = pytest.importorskip('foo')`` inside
    the test body.

    Parameters
    ----------
    packages : List[str]

        The python packages that should be available for the test to
        run.

    Returns
    -------
    mark: pytest mark

        The pytest mark to be applied to unit tests that require this

    """

    def _importable(name):
        # Probe for the package without keeping a reference to the module.
        try:
            __import__(name)
        except ImportError:
            return False
        return True

    # Evaluate availability once, at decoration time, and build one
    # skipif mark per requested package.
    skip_marks = tuple(
        pytest.mark.skipif(not _importable(pkg), reason=f"Cannot import '{pkg}'")
        for pkg in packages
    )

    def wrapper(func):
        for skip in skip_marks:
            func = skip(func)
        return func

    return wrapper


def parametrize_targets(*args):

"""Parametrize a test over a specific set of targets.

Use this decorator when you want your test to be run over a
Expand Down
28 changes: 27 additions & 1 deletion tests/python/contrib/test_coreml_codegen.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,11 +19,12 @@
from unittest import mock

import tvm
import tvm.testing
from tvm import relay
from tvm.relay import transform
from tvm.contrib.target import coreml as _coreml

pytest.importorskip("coremltools")
requires_coremltools = tvm.testing.requires_package("coremltools")


def _has_xcode():
Expand Down Expand Up @@ -88,6 +89,11 @@ def _create_graph_annotated():
return mod


@pytest.mark.xfail(
reason="Currently failing test. See tracking issue https://github.com/apache/tvm/issues/8901"
)
@tvm.testing.uses_gpu
@requires_coremltools
def test_annotate():
mod = _create_graph()
mod = transform.AnnotateTarget("coremlcompiler")(mod)
Expand All @@ -98,6 +104,8 @@ def test_annotate():


@pytest.mark.skipif(not _has_xcode(), reason="Xcode is not available")
@tvm.testing.uses_gpu
@requires_coremltools
def test_compile_and_run():
dev = tvm.cpu()
target = "llvm"
Expand Down Expand Up @@ -136,6 +144,8 @@ def _construct_model(func, m1, m2):
fcompile(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_add():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -144,6 +154,8 @@ def test_add():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_multiply():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -152,6 +164,8 @@ def test_multiply():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_clip():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -160,6 +174,8 @@ def test_clip():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_batch_flatten():
shape = (10, 10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -168,6 +184,8 @@ def test_batch_flatten():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_expand_dims():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -180,6 +198,8 @@ def test_expand_dims():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_relu():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -188,6 +208,8 @@ def test_relu():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_softmax():
shape = (10, 10)
x = relay.var("x", shape=shape)
Expand All @@ -196,6 +218,8 @@ def test_softmax():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_conv2d():
x = relay.var("x", shape=(1, 3, 224, 224))
w = relay.const(np.zeros((16, 3, 3, 3), dtype="float32"))
Expand All @@ -204,6 +228,8 @@ def test_conv2d():
_construct_model(func)


@tvm.testing.uses_gpu
@requires_coremltools
def test_global_avg_pool2d():
shape = (10, 10, 10, 10)
x = relay.var("x", shape=shape)
Expand Down
Loading