45 commits
7ee0902
Code migration Start (#1)
jcf94 May 26, 2020
9fcbf0b
Split transform_step out & Update more UTs (#3)
jcf94 May 27, 2020
f43e82f
Add search_task, measure and serialization (#4)
jcf94 May 28, 2020
e0a5ed5
Add MetaTileRewritePolicy (#5)
jcf94 May 29, 2020
359905a
Basic Python API for State (#6)
jcf94 Jun 3, 2020
2032a64
Add Python API: Measure & Task (#7)
jcf94 Jun 4, 2020
6b21dc6
Add ansor.auto_schedule() API; First AutoSchedule working version(#8)
jcf94 Jun 4, 2020
e52135f
Bug fix & Add python serialization API (#10)
jcf94 Jun 5, 2020
1fe6638
Improve code style, python wrapper and test cases (#11)
merrymercy Jun 7, 2020
43d1530
fix unit tests
merrymercy Jun 8, 2020
f367d15
Add RPCRunner & OpenCL/CUDA test (#12)
jcf94 Jun 8, 2020
2bd6471
rebase to upstream/master
merrymercy Jun 8, 2020
c860f2c
Add Ansor basic tutorial (#13)
jcf94 Jun 8, 2020
f60d1a6
migrate feature extraction (#14)
merrymercy Jun 8, 2020
b839c0f
Add XGBModel & RPCRunnerWarpper (#15)
jcf94 Jun 9, 2020
cfe58d7
Migrate workload_registry.py (#16)
merrymercy Jun 9, 2020
143ea45
add task scheduler (#17)
merrymercy Jun 9, 2020
ed075c2
Add conv2d cuda tutorial with workload registry (#18)
jcf94 Jun 9, 2020
74ec7d0
add tune_test.py (the old tune_wkl.py) (#19)
merrymercy Jun 9, 2020
cd0a516
Code refine for tune_test.py & Add a pre load callback (#20)
jcf94 Jun 10, 2020
3a24e49
Add python custom sketch rule (#21)
jcf94 Jun 11, 2020
a155c1f
Ansor Relay Integration (without layout rewrite) (#22)
minminsun Jun 12, 2020
674027f
Add tune_op_subgraph.py & Some code clean for tune_network.py (#23)
jcf94 Jun 12, 2020
2f241ed
add explicit_unroll_max_extent (#25)
merrymercy Jun 12, 2020
18d44b8
Add Index simplification & API update (#26)
jcf94 Jun 15, 2020
4ea6712
Update PreLoadMeasuredStates & Some bug fix (#27)
jcf94 Jun 16, 2020
6126cdb
Add tensorize step for loop_state (#31)
jcf94 Jun 19, 2020
c7364df
State python api update (#33)
jcf94 Jun 19, 2020
36cd9ef
kernel layout rewrite (#28)
minminsun Jun 19, 2020
145e61c
[cache flush] port cache flush to ansor (#32)
FrozenGene Jun 19, 2020
2c27816
Improve relay integration (#34)
merrymercy Jun 20, 2020
0794875
Fix xgb error & Simplify dispatcher (#35)
merrymercy Jun 20, 2020
a4c4548
Rename "MetaTileRewritePolicy" to "SketchPolicy". (#36)
merrymercy Jun 20, 2020
593a2c7
rebase
merrymercy Jun 20, 2020
53bd591
Migrate all node::make to noderef's construct function (#37)
jcf94 Jun 22, 2020
8e53d12
Some lint fix & Recover the double constructor of tvm::PrimExpr (#39)
jcf94 Jun 23, 2020
cd5c5ad
Add MutateComputeLocation and MutateParallel in evolutionary search (…
merrymercy Jun 23, 2020
5860191
Improve loop state python API (stage_tensors -> stage_ops) (#41)
merrymercy Jun 23, 2020
14a19cd
ComputeDAG bug fix & Add Custom TensorCore Matmul Example (#42)
jcf94 Jun 24, 2020
59c88d1
Revert commit
jcf94 Jun 24, 2020
86bfd8f
Revert commits
jcf94 Jun 24, 2020
910964e
Rever Commits, Start to build minimum Ansor system
jcf94 Jun 24, 2020
d567617
Code clean for minimum Ansor system
jcf94 Jun 24, 2020
a8e589e
UT ready
jcf94 Jun 24, 2020
2456c3e
Update
jcf94 Jun 24, 2020
4 changes: 4 additions & 0 deletions .gitignore
@@ -196,6 +196,7 @@ tvm_t.*
.python_history
.pytest_cache
.local
cmake-build-debug

# Visual Studio Code
.vscode
@@ -233,3 +234,6 @@ conda/pkg
# antlr files
*.tokens
*.interp

# ansor tuning logs
scripts/*.json
1 change: 1 addition & 0 deletions CMakeLists.txt
@@ -185,6 +185,7 @@ assign_source_group("Include" ${GROUP_INCLUDE})

# Source file lists
file(GLOB_RECURSE COMPILER_SRCS
src/ansor/*.cc
src/node/*.cc
src/ir/*.cc
src/arith/*.cc
35 changes: 35 additions & 0 deletions python/tvm/ansor/__init__.py
@@ -0,0 +1,35 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=unused-import, redefined-builtin
"""Namespace for Ansor auto-scheduler"""

from . import compute_dag
from . import measure
from . import serialization
from . import loop_state
from . import utils
from . import workload_registry

# Shortcut
from .compute_dag import ComputeDAG
from .auto_schedule import SearchTask, TuneOption, HardwareParams, \
auto_schedule, EmptyPolicy
from .measure import MeasureInput, LocalBuilder, LocalRunner
from .serialization import LogToFile, LogReader, best_measure_pair_in_file, \
load_from_file, write_measure_records_to_file
from .workload_registry import register_workload_func, \
workload_key_to_dag, make_workload_key_func
22 changes: 22 additions & 0 deletions python/tvm/ansor/_ffi_api.py
@@ -0,0 +1,22 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""Register FFI APIs from C++ for the namespace tvm.ansor"""
import tvm._ffi


tvm._ffi._init_api("ansor", __name__)
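
A minimal usage sketch (illustrative, with assumptions): _init_api("ansor", __name__) attaches every global packed function that the C++ side registers under the "ansor." prefix to this module, which is how the Python wrappers reach their C++ constructors. The snippet below assumes the "ansor.ComputeDAG" registration that compute_dag.py relies on; the placeholder tensors are made up for the example.

import tvm
from tvm import te
from tvm.ansor import _ffi_api

# Illustrative tensors to feed a registered packed function.
A = te.placeholder((8, 8), name="A")
B = te.compute((8, 8), lambda i, j: A[i, j] * 2.0, name="B")

# After _init_api("ansor", __name__) runs, the C++ function registered as
# "ansor.ComputeDAG" is reachable as a plain attribute of this module.
dag = _ffi_api.ComputeDAG([A, B])
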
178 changes: 178 additions & 0 deletions python/tvm/ansor/auto_schedule.py
@@ -0,0 +1,178 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

"""User interface for auto-scheduler"""

import random

import tvm._ffi
from tvm.runtime import Object
from .measure import LocalBuilder, LocalRunner
from . import _ffi_api


@tvm._ffi.register_object("ansor.HardwareParams")
class HardwareParams(Object):
""" The parameters of target hardware, this is used to guide the search process of
SearchPolicy.

Parameters
----------
num_cores : int
vector_unit_bytes : int
cache_line_bytes : int
max_unroll_vec : int
max_innermost_split_factor : int
"""
def __init__(self, num_cores, vector_unit_bytes, cache_line_bytes,
max_unroll_vec, max_innermost_split_factor):
self.__init_handle_by_constructor__(_ffi_api.HardwareParams, num_cores,
vector_unit_bytes, cache_line_bytes,
max_unroll_vec, max_innermost_split_factor)


@tvm._ffi.register_object("ansor.SearchTask")
class SearchTask(Object):
""" The meta-information of a search task

Parameters
----------
dag : ComputeDAG
workload_key : str
target : tvm.target.Target
target_host : tvm.target.Target
hardware_params : HardwareParams
"""
def __init__(self, dag, workload_key, target, target_host=None,
hardware_params=None):
self.__init_handle_by_constructor__(_ffi_api.SearchTask, dag,
workload_key, target, target_host,
hardware_params)


@tvm._ffi.register_object("ansor.SearchPolicy")
class SearchPolicy(Object):
""" The base class for search policy """


@tvm._ffi.register_object("ansor.EmptyPolicy")
class EmptyPolicy(SearchPolicy):
""" This is an example empty search policy which will always generate
the init state of target ComputeDAG.
"""
def __init__(self):
self.__init_handle_by_constructor__(_ffi_api.EmptyPolicy)


@tvm._ffi.register_object("ansor.SearchCallback")
class SearchCallback(Object):
""" Callback function before or after search process """


@tvm._ffi.register_object("ansor.TuneOption")
class TuneOption(Object):
""" The options for tuning

Parameters
----------
n_trials: int
Number of total measurement trials
early_stopping: int
Stop the tuning early if no improvement is seen after n measurements
num_measure_per_iter: int
The number of programs to be measured at each iteration
verbose: int
Verbosity level. 0 means silent.
builder: Builder
Builder which builds the program
runner: Runner
Runner which runs the program and measures the time cost
measure_callbacks: List[MeasureCallback]
Callback functions called after each measure
Candidates:
- ansor.LogToFile
pre_search_callbacks: List[SearchCallback]
Callback functions called before the search process
Candidates:
- ansor.PreloadMeasuredStates
- ansor.PreloadCustomSketchRule
"""
def __init__(self, n_trials=0, early_stopping=-1, num_measure_per_iter=64,
verbose=1, builder='local', runner='local', measure_callbacks=None,
pre_search_callbacks=None):
if isinstance(builder, str):
if builder == 'local':
builder = LocalBuilder()
else:
raise ValueError("Invalid builder: " + builder)

if isinstance(runner, str):
if runner == 'local':
runner = LocalRunner()
else:
raise ValueError("Invalid builder: " + runner)

if measure_callbacks is None:
measure_callbacks = []

if pre_search_callbacks is None:
pre_search_callbacks = []

self.__init_handle_by_constructor__(
_ffi_api.TuneOption, n_trials, early_stopping, num_measure_per_iter,
verbose, builder, runner, measure_callbacks, pre_search_callbacks)


def auto_schedule(workload, target=None,
target_host=None, search_policy='default',
hardware_params=None, tune_option=None):
""" Do auto scheduling for a computation declaration.

The workload parameter can be a workload_key `string`, or a `SearchTask`
passed in directly.

Parameters
----------
workload : Union[SearchTask, str]
target : Target
target_host : Target = None
search_policy : Union[SearchPolicy, str]
hardware_params : HardwareParams
tune_option : TuneOption

Returns
-------
sch : tvm.Schedule
tensors : List[Tensor]
"""
if isinstance(search_policy, str):
if search_policy == 'default':
search_policy = EmptyPolicy()
else:
raise ValueError("Invalid search policy: " + search_policy)

if tune_option is None:
tune_option = TuneOption(n_trials=0)

if isinstance(workload, str):
sch, tensors = _ffi_api.AutoScheduleByWorkloadKey(
workload, target, target_host, search_policy, hardware_params, tune_option)
return sch, tensors
if isinstance(workload, SearchTask):
sch, tensors = _ffi_api.AutoScheduleBySearchTask(workload, search_policy, tune_option)
return sch, tensors
raise ValueError("Invalid workload: " + workload + ". Expect a string or SearchTask")
77 changes: 77 additions & 0 deletions python/tvm/ansor/compute_dag.py
@@ -0,0 +1,77 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

""" Computational graph and its analysis tools """

import tvm._ffi
from tvm.runtime import Object
from .loop_state import State, StateObject
from . import _ffi_api


@tvm._ffi.register_object("ansor.ComputeDAG")
class ComputeDAG(Object):
"""
Computation declaration graph

Parameters
----------
tensors : List[Tensor]
"""
def __init__(self, tensors):
self.__init_handle_by_constructor__(_ffi_api.ComputeDAG, tensors)

def get_init_state(self):
""" Get init state of this ComputeDAG

Returns
-------
state : State
"""
return State(_ffi_api.ComputeDAGGetInitState(self), self)

def apply_steps_from_state(self, state):
"""
Apply transform steps according to the history of a state

Parameters
----------
state : Union[State, StateObject]

Returns
-------
sch : Schedule
args : List[Tensor]
"""
state_obj = state if isinstance(state, StateObject) else state.state_object
return _ffi_api.ComputeDAGApplyStepsFromState(self, state_obj)

def print_python_code_from_state(self, state):
"""
Print the transform steps in the history of a state as TVM's python schedule primitives

Parameters
----------
state : Union[State, StateObject]

Returns
-------
code : str
"""
state_obj = state if isinstance(state, StateObject) else state.state_object
return _ffi_api.ComputeDAGPrintPythonCodeFromState(self, state_obj)
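
A short usage sketch of this wrapper (the element-wise computation is made up for illustration). With no transform steps recorded in the state, apply_steps_from_state reproduces the naive schedule.

import tvm
from tvm import te, ansor

# Illustrative element-wise op.
A = te.placeholder((512,), name="A")
B = te.compute((512,), lambda i: A[i] + 1.0, name="B")

dag = ansor.ComputeDAG([A, B])
state = dag.get_init_state()  # State wrapping the C++ init StateObject

# No steps have been applied yet, so this yields the naive schedule.
sch, args = dag.apply_steps_from_state(state)
print(dag.print_python_code_from_state(state))
print(tvm.lower(sch, args, simple_mode=True))
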