diff --git a/docker/Dockerfile.ci_adreno b/docker/Dockerfile.ci_adreno new file mode 100644 index 000000000000..a08b2dfe8c64 --- /dev/null +++ b/docker/Dockerfile.ci_adreno @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# CI docker GPU env +FROM tlcpack/ci-gpu:20220908-060034-62bdc91b1 + +COPY utils/apt-install-and-clear.sh /usr/local/bin/apt-install-and-clear + +# Android SDK +COPY install/ubuntu_install_androidsdk.sh /install/ubuntu_install_androidsdk.sh +RUN bash /install/ubuntu_install_androidsdk.sh +ENV ANDROID_HOME=/opt/android-sdk-linux +ENV ANDROID_NDK_HOME=/opt/android-sdk-linux/ndk/21.3.6528147 +ENV PATH /opt/android-sdk-linux/platform-tools:$PATH diff --git a/docker/bash.sh b/docker/bash.sh index 10d80478d3f7..3f8f3d8baba4 100755 --- a/docker/bash.sh +++ b/docker/bash.sh @@ -161,6 +161,7 @@ function parse_error() { break_joined_flag='if (( ${#1} == 2 )); then shift; else set -- -"${1#-i}" "${@:2}"; fi' DOCKER_ENV=( ) +DOCKER_FLAGS=( ) while (( $# )); do case "$1" in @@ -184,6 +185,11 @@ while (( $# )); do shift ;; + --net) + DOCKER_FLAGS+=( --net "$2" ) + shift 2 + ;; + --mount) if [[ -n "$2" ]]; then MOUNT_DIRS+=("$2") @@ -212,6 +218,11 @@ while (( $# )); do shift 2 ;; + --volume) + DOCKER_FLAGS+=( --volume "$2" ) + shift 2 + ;; + --dry-run) DRY_RUN=true shift @@ -284,7 +295,6 @@ fi source "$(dirname $0)/dev_common.sh" || exit 2 -DOCKER_FLAGS=( ) DOCKER_MOUNT=( ) DOCKER_DEVICES=( ) @@ -460,7 +470,6 @@ echo "" echo Running \'${COMMAND[@]+"${COMMAND[@]}"}\' inside ${DOCKER_IMAGE_NAME}... - DOCKER_CMD=(${DOCKER_BINARY} run ${DOCKER_FLAGS[@]+"${DOCKER_FLAGS[@]}"} ${DOCKER_ENV[@]+"${DOCKER_ENV[@]}"} diff --git a/docker/install/ubuntu_install_cmake_source.sh b/docker/install/ubuntu_install_cmake_source.sh index 702130f07964..db0f990e0f8d 100755 --- a/docker/install/ubuntu_install_cmake_source.sh +++ b/docker/install/ubuntu_install_cmake_source.sh @@ -32,7 +32,7 @@ wget https://cmake.org/files/v${v}/cmake-${version}.tar.gz tar xvf cmake-${version}.tar.gz cd cmake-${version} ./bootstrap -make -j"$(nproc)" +make -j$(nproc) make install cd .. 
rm -rf cmake-${version} cmake-${version}.tar.gz diff --git a/python/tvm/testing/utils.py b/python/tvm/testing/utils.py index ad1e003d6e3f..3a177f7cfd80 100644 --- a/python/tvm/testing/utils.py +++ b/python/tvm/testing/utils.py @@ -899,8 +899,8 @@ def _any_gpu_exists(): "OpenCL", cmake_flag="USE_OPENCL", target_kind_enabled="opencl", - target_kind_hardware="opencl", - parent_features="gpu", + target_kind_hardware="opencl" if "RPC_TARGET" not in os.environ else None, + parent_features="gpu" if "RPC_TARGET" not in os.environ else None, ) # Mark a test as requiring the rocm runtime @@ -933,6 +933,15 @@ def _any_gpu_exists(): parent_features="gpu", ) +# Mark a test as requiring OpenCLML support in build. +requires_openclml = Feature( + "OpenCLML", + "CLML", + cmake_flag="USE_CLML", + target_kind_enabled="opencl", +) + + # Mark a test as requiring microTVM to run requires_micro = Feature("micro", "MicroTVM", cmake_flag="USE_MICRO") diff --git a/tests/python/contrib/test_clml/conftest.py b/tests/python/contrib/test_clml/conftest.py new file mode 100644 index 000000000000..a51fc8edf107 --- /dev/null +++ b/tests/python/contrib/test_clml/conftest.py @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import sys +import tvm +import pytest +from test_clml.infrastructure import Device + + +@pytest.fixture(scope="session") +def device(): + return Device() diff --git a/tests/python/contrib/test_clml/infrastructure.py b/tests/python/contrib/test_clml/infrastructure.py index 08b11525ecd2..12accda3fda5 100644 --- a/tests/python/contrib/test_clml/infrastructure.py +++ b/tests/python/contrib/test_clml/infrastructure.py @@ -73,12 +73,12 @@ class Device: """ connection_type = "tracker" - host = "localhost" - port = 9150 + host = os.getenv("TVM_TRACKER_HOST", "localhost") + port = int(os.getenv("TVM_TRACKER_PORT", 9090)) target = "opencl" target_host = "llvm -mtriple=aarch64-linux-gnu" device_key = "android" - cross_compile = "aarch64-linux-android-g++" + cross_compile = os.getenv("TVM_NDK_CC", "aarch64-linux-android-g++") def __init__(self): """Keep remote device for lifetime of object.""" @@ -100,43 +100,6 @@ def _get_remote(cls): return device - @classmethod - def load(cls, file_name): - """Load test config - - Load the test configuration by looking for file_name relative - to the test_clml directory. 
- """ - location = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) - config_file = os.path.join(location, file_name) - if not os.path.exists(config_file): - warnings.warn("Config file doesn't exist, resuming CLML tests with default config.") - return - with open(config_file, mode="r") as config: - test_config = json.load(config) - - cls.connection_type = test_config["connection_type"] - cls.host = test_config["host"] - cls.port = test_config["port"] - cls.target = test_config["target"] - cls.target_host = test_config["target_host"] - cls.device_key = test_config.get("device_key") or "" - cls.cross_compile = test_config.get("cross_compile") or "" - - -def skip_runtime_test(): - """Skip test if it requires the runtime and it's not present.""" - # CLML codegen not present. - if not tvm.get_global_func("relay.ext.clml", True): - print("Skip because CLML codegen is not available.") - return True - - # Remote device is in use or CLML runtime not present - # Note: Ensure that the device config has been loaded before this check - if not Device.connection_type != "local" and not clml.is_clml_runtime_enabled(): - print("Skip because runtime isn't present or a remote device isn't being used.") - return True - def skip_codegen_test(): """Skip test if it requires the CLML codegen and it's not present.""" diff --git a/tests/python/contrib/test_clml/test_network.py b/tests/python/contrib/test_clml/test_network.py index 95f3a45baf78..8d740d6dce4d 100644 --- a/tests/python/contrib/test_clml/test_network.py +++ b/tests/python/contrib/test_clml/test_network.py @@ -16,13 +16,13 @@ # under the License. """OpenCL ML network tests.""" +import tvm import numpy as np -import pytest -from tvm import testing from tvm import relay - -import tvm -from test_clml.infrastructure import skip_runtime_test, build_and_run, Device +from tvm.relay import testing +from tvm.contrib import utils +from test_clml.infrastructure import build_and_run, Device +import pytest def _build_and_run_network(mod, params, inputs, data, device, atol, rtol, tvm_log=""): @@ -59,15 +59,9 @@ def get_bottom_top_model(model, layer_name): return mod, params, ref_output -def test_mobilenet(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - dtype = "float16" - +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def test_mobilenet(device, dtype): def get_model(): from tensorflow.keras.applications import MobileNet import tensorflow as tf @@ -107,15 +101,9 @@ def get_model(): tvm.testing.assert_allclose(opencl_sort[:10], clml_sort[:10], rtol=1e-5, atol=1e-5) -def test_inception_v3(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - dtype = "float16" - +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def test_inception_v3(device, dtype): def get_model(): from tensorflow.keras.applications import InceptionV3 import tensorflow as tf @@ -150,15 +138,9 @@ def get_model(): tvm.testing.assert_allclose(opencl_sort[:5], clml_sort[:5], rtol=1e-5, atol=1e-5) -def test_resnet50v2(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - dtype = "float16" - +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def test_resnet50v2(device, dtype): def get_model(): from tensorflow.keras.applications import ResNet50V2 import tensorflow as tf @@ -202,9 +184,3 @@ def get_model(): clml_sort = np.argsort(outputs[0].asnumpy()).flatten() 
tvm.testing.assert_allclose(opencl_sort[:10], clml_sort[:10], rtol=1e-5, atol=1e-5) - - -if __name__ == "__main__": - test_mobilenet() - test_resnet50v2() - test_inception_v3() diff --git a/tests/python/contrib/test_clml/test_ops.py b/tests/python/contrib/test_clml/test_ops.py index d14a5ec6e90d..d2431d2dfd3b 100644 --- a/tests/python/contrib/test_clml/test_ops.py +++ b/tests/python/contrib/test_clml/test_ops.py @@ -16,21 +16,14 @@ # under the License. """CLML integration conv2d tests.""" -import numpy as np - -np.random.seed(0) - import tvm -from tvm import testing +import numpy as np from tvm import relay +from tvm.relay import testing from tvm.ir import IRModule - -from test_clml.infrastructure import ( - skip_runtime_test, - skip_codegen_test, - build_and_run, - Device, -) +from tvm.contrib import utils +from test_clml.infrastructure import build_and_run, Device, skip_codegen_test +import pytest def _get_conv_model( @@ -98,17 +91,9 @@ def _get_conv_model( return out, params -def test_conv2d(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - np.random.seed(0) - - dtype = "float32" - +@pytest.mark.parametrize("dtype", ["float32"]) +@tvm.testing.requires_openclml +def test_conv2d(device, dtype): trials = [ # Normal convolution [3, 3, (1, 1), (1, 1), (1, 1), 4, (14, 10, 10), (False, False, False)], @@ -168,17 +153,9 @@ def test_conv2d(): ) -def test_batchnorm(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - np.random.seed(0) - - dtype = "float32" - +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def _test_batchnorm(device, dtype): in_shape = (1, 8, 64, 64) channels = 8 @@ -211,14 +188,9 @@ def test_batchnorm(): ) -def test_concat(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - dtype = "float16" +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def test_concat(device, dtype): in_shape_1 = (1, 16, 16, 16) in_shape_2 = (1, 16, 16, 16) a = relay.var("input_1", shape=in_shape_1, dtype=dtype) @@ -241,14 +213,9 @@ def test_concat(): ) -def test_avgpool(): - Device.load("test_config.json") - - if skip_runtime_test(): - return - - device = Device() - dtype = "float16" +@pytest.mark.parametrize("dtype", ["float16"]) +@tvm.testing.requires_openclml +def test_avgpool(device, dtype): trials = [ # input size pool_size stride paading [(1, 64, 147, 147), (3, 3), (2, 2), (0, 0, 0, 0), "max"], @@ -288,10 +255,3 @@ def test_avgpool(): tvm.testing.assert_allclose( clml_out[0].asnumpy(), opencl_out[0].asnumpy(), rtol=1e-3, atol=1e-3 ) - - -if __name__ == "__main__": - test_conv2d() - # test_batchnorm() - test_avgpool() - test_concat() diff --git a/tests/python/relay/opencl_texture/test_conv2d_nchw_texture.py b/tests/python/relay/opencl_texture/test_conv2d_nchw_texture.py index 504a2b4e3ed3..0513a2d3f663 100644 --- a/tests/python/relay/opencl_texture/test_conv2d_nchw_texture.py +++ b/tests/python/relay/opencl_texture/test_conv2d_nchw_texture.py @@ -479,7 +479,6 @@ def test_conv2d_winograd_conv(target, dtype): @tvm.testing.requires_opencl @tvm.testing.parametrize_targets("opencl -device=adreno") -@pytest.mark.skipif(tvm.testing.utils.IS_IN_CI, reason="failed due to nvidia libOpencl in the CI") def test_residual_block(target, dtype): """ - some kind of residual block followed by convolution to have texture after residual block @@ -569,20 +568,33 @@ def test_residual_block(target, dtype): "weight2": 
tvm.nd.array(filter_data2), "weight3": tvm.nd.array(filter_data3), } - - static_memory_scope = [ - "global", - "global.texture", - "global.texture-weight", - "global.texture-weight", - "global.texture", - "global.texture-weight", - "global", - "global.texture", - "global.texture-weight", - "", - "", - ] + if dtype == "float16": + static_memory_scope = [ + "global", + "global.texture", + "global.texture-weight", + "global.texture-weight", + "global.texture", + "global.texture-weight", + "global", + "global.texture", + "global.texture-weight", + "", + "", + ] + else: + static_memory_scope = [ + "global", + "global.texture", + "global.texture-weight", + "global.texture-weight", + "global.texture", + "global.texture-weight", + "global.texture", + "global.texture-weight", + "", + "", + ] build_run_compare(mod, params1, {"data": input_shape}, dtype, target, static_memory_scope) diff --git a/tests/scripts/ci.py b/tests/scripts/ci.py index 31b7316d88d8..02ef7b888b80 100755 --- a/tests/scripts/ci.py +++ b/tests/scripts/ci.py @@ -147,7 +147,14 @@ def gen_name(s: str) -> str: return f"{s}-{suffix}" -def docker(name: str, image: str, scripts: List[str], env: Dict[str, str], interactive: bool): +def docker( + name: str, + image: str, + scripts: List[str], + env: Dict[str, str], + interactive: bool, + additional_flags: Dict[str, str], +): """ Invoke a set of bash scripts through docker/bash.sh @@ -169,6 +176,7 @@ def docker(name: str, image: str, scripts: List[str], env: Dict[str, str], inter "ci_arm", "ci_hexagon", "ci_riscv", + "ci_adreno", } if image in sccache_images and os.getenv("USE_SCCACHE", "1") == "1": @@ -196,6 +204,10 @@ def docker(name: str, image: str, scripts: List[str], env: Dict[str, str], inter command.append("--env") command.append(f"{key}={value}") + for key, value in additional_flags.items(): + command.append(key) + command.append(value) + SCRIPT_DIR.mkdir(exist_ok=True) script_file = SCRIPT_DIR / f"{name}.sh" @@ -345,6 +357,7 @@ def generate_command( help: str, precheck: Optional[Callable[[], None]] = None, post_build: Optional[List[str]] = None, + additional_flags: Dict[str, str] = {}, ): """ Helper to generate CLIs that: @@ -411,6 +424,7 @@ def fn( "VERBOSE": "true" if verbose else "false", }, interactive=interactive, + additional_flags=additional_flags, ) fn.__name__ = name @@ -687,6 +701,24 @@ def add_subparser( ), }, ), + generate_command( + name="adreno", + help="Run Adreno build and test(s)", + post_build=["./tests/scripts/task_build_adreno_bins.sh"], + additional_flags={ + "--volume": os.environ.get("ADRENO_OPENCL", "") + ":/adreno-opencl", + "--env": "ADRENO_OPENCL=/adreno-opencl", + "--net": "host", + }, + options={ + "test": ( + "run Adreno API/Python tests", + [ + "./tests/scripts/task_python_adreno.sh " + os.environ.get("ANDROID_SERIAL", ""), + ], + ), + }, + ), ] diff --git a/tests/scripts/task_build_adreno_bins.sh b/tests/scripts/task_build_adreno_bins.sh new file mode 100755 index 000000000000..5d453251606a --- /dev/null +++ b/tests/scripts/task_build_adreno_bins.sh @@ -0,0 +1,53 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -e +set -u +set -x + +output_directory=$(realpath ${PWD}/build-adreno-target) +rm -rf ${output_directory} + +mkdir -p ${output_directory} +cd ${output_directory} + +cp ../cmake/config.cmake . + +echo set\(USE_CLML ON\) >> config.cmake +echo set\(USE_CLML_GRAPH_EXECUTOR "${ADRENO_OPENCL}"\) >> config.cmake +echo set\(USE_RPC ON\) >> config.cmake +echo set\(USE_CPP_RPC ON\) >> config.cmake +echo set\(USE_GRAPH_EXECUTOR ON\) >> config.cmake +echo set\(USE_LIBBACKTRACE AUTO\) >> config.cmake + +echo set\(ANDROID_ABI arm64-v8a\) >> config.cmake +echo set\(ANDROID_PLATFORM android-28\) >> config.cmake +echo set\(MACHINE_NAME aarch64-linux-gnu\) >> config.cmake + +cmake -DCMAKE_TOOLCHAIN_FILE="${ANDROID_NDK_HOME}/build/cmake/android.toolchain.cmake" \ + -DANDROID_ABI=arm64-v8a \ + -DANDROID_PLATFORM=android-28 \ + -DCMAKE_SYSTEM_VERSION=1 \ + -DCMAKE_FIND_ROOT_PATH="${ADRENO_OPENCL}" \ + -DCMAKE_FIND_ROOT_PATH_MODE_PROGRAM=NEVER \ + -DCMAKE_FIND_ROOT_PATH_MODE_LIBRARY=ONLY \ + -DCMAKE_CXX_COMPILER="${ANDROID_NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang++" \ + -DCMAKE_C_COMPILER="${ANDROID_NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang" \ + -DMACHINE_NAME="aarch64-linux-gnu" .. + +make -j$(nproc) tvm_rpc diff --git a/tests/scripts/task_config_build_adreno.sh b/tests/scripts/task_config_build_adreno.sh new file mode 100755 index 000000000000..d45c5e8b7dcf --- /dev/null +++ b/tests/scripts/task_config_build_adreno.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -euxo pipefail + +BUILD_DIR=$1 +mkdir -p "$BUILD_DIR" +cd "$BUILD_DIR" +cp ../cmake/config.cmake . + +echo set\(USE_OPENCL ON\) >> config.cmake +echo set\(USE_CLML ON\) >> config.cmake +echo set\(USE_RPC ON\) >> config.cmake +echo set\(USE_GRAPH_EXECUTOR ON\) >> config.cmake +echo set\(USE_LIBBACKTRACE AUTO\) >> config.cmake +echo set\(USE_LLVM ON\) >> config.cmake diff --git a/tests/scripts/task_python_adreno.sh b/tests/scripts/task_python_adreno.sh new file mode 100755 index 000000000000..2b131ec762be --- /dev/null +++ b/tests/scripts/task_python_adreno.sh @@ -0,0 +1,65 @@ +#!/usr/bin/env bash +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +set -euxo pipefail + +export TVM_TEST_TARGETS="opencl" +export TVM_RELAY_OPENCL_TEXTURE_TARGETS="opencl -device=adreno" + +source tests/scripts/setup-pytest-env.sh +export PYTHONPATH=${PYTHONPATH}:${TVM_PATH}/apps/extension/python +export LD_LIBRARY_PATH="build:${LD_LIBRARY_PATH:-}" +export TVM_INTEGRATION_TESTSUITE_NAME=python-integration-adreno + +export TVM_TRACKER_HOST=127.0.0.1 +export TVM_TRACKER_PORT=$(((RANDOM % 100) + 9100)) +export RPC_TARGET="adreno" +export TVM_NDK_CC="${ANDROID_NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang" + +env PYTHONPATH=python python3 -m tvm.exec.rpc_tracker --host "${TVM_TRACKER_HOST}" --port "${TVM_TRACKER_PORT}" & +TRACKER_PID=$! +sleep 5 # Wait for tracker to bind + +export ANDROID_SERIAL=$1 + +adb shell "mkdir -p /data/local/tmp/tvm_ci" +adb push build-adreno-target/tvm_rpc /data/local/tmp/tvm_ci/tvm_rpc_ci +adb push build-adreno-target/libtvm_runtime.so /data/local/tmp/tvm_ci + +adb reverse tcp:${TVM_TRACKER_PORT} tcp:${TVM_TRACKER_PORT} +adb forward tcp:5000 tcp:5000 +adb forward tcp:5001 tcp:5001 +adb forward tcp:5002 tcp:5002 +env adb shell "cd /data/local/tmp/tvm_ci; killall -9 tvm_rpc_ci; sleep 2; LD_LIBRARY_PATH=/data/local/tmp/tvm_ci/ ./tvm_rpc_ci server --host=0.0.0.0 --port=5000 --port-end=5010 --tracker=127.0.0.1:${TVM_TRACKER_PORT} --key=android" & +DEVICE_PID=$! +sleep 5 # Wait for the device connections +trap "{ kill ${TRACKER_PID}; kill ${DEVICE_PID}; }" 0 + +# cleanup pycache +find . -type f -path "*.pyc" | xargs rm -f +# Test TVM +make cython3 + +# OpenCL texture test on Adreno +run_pytest ctypes ${TVM_INTEGRATION_TESTSUITE_NAME}-opencl-texture tests/python/relay/opencl_texture + +# Adreno CLML test +run_pytest ctypes ${TVM_INTEGRATION_TESTSUITE_NAME}-openclml tests/python/contrib/test_clml + +kill ${TRACKER_PID} +kill ${DEVICE_PID}
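
Usage note (not part of the patch): the pieces above are tied together by the new `adreno` subcommand in tests/scripts/ci.py, which builds inside the ci_adreno image, runs task_build_adreno_bins.sh as a post-build step, and then drives task_python_adreno.sh against an attached device. A minimal local sketch, assuming an adb-visible CLML-capable device and the Adreno OpenCL SDK unpacked on the host (the SDK path is illustrative, and the `--test` flag is assumed to be generated from the subcommand's options dict as for the other ci.py subcommands):

  # Host side: point the ci_adreno container at the OpenCL SDK and the device,
  # then build host/target binaries and run the Adreno API/Python tests.
  export ADRENO_OPENCL=/path/to/adreno-opencl-sdk    # mounted at /adreno-opencl inside the container
  export ANDROID_SERIAL=<serial-from-adb-devices>    # forwarded as the argument to task_python_adreno.sh
  python3 tests/scripts/ci.py adreno --test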
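
Likewise, the reworked test_clml suite no longer loads test_config.json; it is configured purely through environment variables, so it can also be pointed at an already-running tracker and on-device tvm_rpc server (key "android") outside of task_python_adreno.sh. A sketch under those assumptions (plain pytest is used here; the CI itself goes through run_pytest and setup-pytest-env.sh, so PYTHONPATH handling below is an approximation):

  # Environment read by tests/python/contrib/test_clml/infrastructure.py
  export TVM_TRACKER_HOST=127.0.0.1
  export TVM_TRACKER_PORT=9190   # whatever port the tracker was started on
  export TVM_NDK_CC=${ANDROID_NDK_HOME}/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android28-clang
  # Relaxes the host-GPU/hardware requirement on the OpenCL feature in tvm.testing.utils
  export RPC_TARGET=adreno
  export PYTHONPATH=$(pwd)/python:${PYTHONPATH:-}
  python3 -m pytest tests/python/contrib/test_clml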