From a41fffaf5f0cad8bab1d569d831ff96647f2f428 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 20 Sep 2018 23:35:04 -0700 Subject: [PATCH 01/28] Add first pass at language reference from old branch --- docs/langref/index.rst | 1 + docs/langref/relay/expressions.rst | 161 +++++++++++++++++++++++ docs/langref/relay/index.rst | 17 +++ docs/langref/relay/intro.rst | 203 +++++++++++++++++++++++++++++ docs/langref/relay/type_system.rst | 187 ++++++++++++++++++++++++++ 5 files changed, 569 insertions(+) create mode 100644 docs/langref/relay/expressions.rst create mode 100644 docs/langref/relay/index.rst create mode 100644 docs/langref/relay/intro.rst create mode 100644 docs/langref/relay/type_system.rst diff --git a/docs/langref/index.rst b/docs/langref/index.rst index 22ca00f7faa5..757835c94cb9 100644 --- a/docs/langref/index.rst +++ b/docs/langref/index.rst @@ -8,3 +8,4 @@ embedded languages in TVM stack. relay_op hybrid_script + relay/index diff --git a/docs/langref/relay/expressions.rst b/docs/langref/relay/expressions.rst new file mode 100644 index 000000000000..3be5dadd3fdb --- /dev/null +++ b/docs/langref/relay/expressions.rst @@ -0,0 +1,161 @@ +================== +Expressions +================== + +The Relay IR is a pure, expression oriented language, with a dataflow fragment +and structured control flow. Although Relay's representation is a tree, it is +possible to view the dataflow fragments as graph for purposes of writing and +expressing transformations. + +The below sections make an attempt to clearly split the dataflow +fragment from the control fragment. + +================== +Dataflow Expressions +================== + +First we will cover the set of nodes which do not involve control flow, +this fragment of the language is semantically equivalent to pure +computation graphs without control flow. + +Constants +~~~~~~~~~ + +Relay programs can contain constant Tensor values. This node represents +a constant tensor value (values are either Tensors, Products, or Closures in Relay). +The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize +TVM operators for constant evaluation. + +See :py:class:`~tvm.relay.expr.Constant` for its definition and documentation. + +Tuple +~~~~~~~~~~~~~~~ + +We support tuple constructors; the tuple node builds a finite (i.e statically known size) sequence of +heterogenous data. These tuples match closely to Python's and enable efficient projection of their m +embers due to their fixed length. + +.. code-block:: python + + (a, b, c) : Tuple + + (a + b + c, d) : Tuple, Tensor> + +See :py:class:`~tvm.relay.expr.Tuple` for its definition and documentation. + +Function +~~~~~~~~ + +A function node represents a function, it contains a seqeuence of +parameters, a return type, and a body. + +.. code-block:: python + + fun (x : Float, y: Float) -> Float { x + y } + +Functions are first class in Relay, and can be used in any expression +position. Functions are the same as global functions, but do not have +an explicit name. You can use a function in conjunction with a let +binding to define locally recursive functions. + +.. code-block:: python + + let fact = fun (x : Float) -> Float { + if (x == 0) { + 0 + } else { + x * fact(x - 1) + }; + fact(10) + +See :py:class:`~tvm.relay.expr.Function` for its definition and documentation. + +Variables +~~~~~~~~~~~ + +Both global variables, and local variables, are valid expressions, one may use them +anywhere an expression may appear. 
+ +For example the below fragment of code is a valid expression. + +.. code-block:: python + %ret = @global(op_name, %local) + +See :py:class:`~tvm.relay.expr.LocalVar` and :py:class:`~tvm.expr.GlobalVar` for its definition +and documentation. + +Let Binding +~~~~~~~~~~~ + +An immutable variable binding, allows the user to bind an +expression to a name. A let binding contains a local variable, +an optional type annotation, a value, and body expression +which may reference the bound identifier. + +We will first introduce a single binding with no type +anntoations: + +.. code-block:: python + let %x = %a + %b; + x + +The value of a let binding is the value of the final expression +after evaluating the bindings it depends on. + +A user can write a sequence of let bindings, we can view +these blocks and pure dataflow +single binding. These blocks are pure dataflow, and can +be evaluated in any order, reordered up to dataflow. + +We support a sequence of bindings followed by a body which +is the continutation after executing the sequence of bindings. + +I believe this representation will be easier to manipulate then +the mixed dataflow/control flow comptuation graphs. +Data flow and control flow are strictly seperated in this representation +and we can easily syntactically discriminate. When in ANF there should only be +general control flow between `Assignment` nodes and not within the values bound +in bindings. + +This representation also makes it easy to apply reverse more since +sequences of assignments where the only control flow is call instructions +are treated by the algorithm uniformly, and each control flow construct +must be handled individualy. + +See :py:class:`~tvm.relay.expr.Let` for its definition and documentation. + +======================= +Control Flow Expression +======================= + +Control flow expressions change network topology based on values +computed by previous expressions. + +Call +~~~~ + +Terms with function types in Relay are "callable", i.e they can be invoked like +a function in a typical programming language by supplying a set of arguments. + +All Relay functions are typed with function types, as well as all Relay operators. + +.. code-block:: python + fact(10) + +See :py:class:`~tvm.relay.expr.Call` for its definition and documentation. + +If-Then-Else +~~~~~~~~~~~~ + +Relay has a simple if/then/else expression which allows programs to branch +on a single control value which must be of type :code:`bool`, i.e a zero-rank +tensor of booleans (:code:`Tensor[(), bool]`). + +.. code-block:: python + if (sum(equal(t, u))) { + jreturn x: + } else { + return y; + } + +See :py:class:`~tvm.relay.expr.If` for its definition and documentation. diff --git a/docs/langref/relay/index.rst b/docs/langref/relay/index.rst new file mode 100644 index 000000000000..617e745acdfc --- /dev/null +++ b/docs/langref/relay/index.rst @@ -0,0 +1,17 @@ +Relay Language Reference +======================== + +This document is a work in progress language reference describing +Relay, TVM's high level intermediate representation. The name is an +allusion to interneurons which are often referred to as intermediate, +or relay neurons. + +We will continually iterate on this document as we evolve the new IR +and update accordingly. + +.. 
toctree:: + :maxdepth: 2 + + intro + expressions + type_system diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst new file mode 100644 index 000000000000..4b568c675358 --- /dev/null +++ b/docs/langref/relay/intro.rst @@ -0,0 +1,203 @@ +================== +Introduction +================== + +Relay is differentiable programming language with support for +closures, control-flow, and recursion. It has an advanced +static type system specifically designed for programs written by machine +learning practioniers and researchers. + +Relay is intended to replace computation graph based +intermediate representations currently employed by deep +learning frameworks and compilers. The deep learning community +has organically evolved from a representation that was useful +for the limited form of computation originally desired, i.e +a DAG of primitive functions. + +This may be a useful way to describe computation, but is limited once +you want to generalize to dynamic models. + +The conflation of the description of a computation, the +representation used for optimizing it, and the data structure +used to execute it uncessarily hampers many of the goals +of a framework. + +We believe having a high level, an expressive language designed +for compiler optimizations is essential to the future of an +end-to-end deep learning compiler stack. + +Relay's design was influenced by the authors previous experince +building advanced optimizing compilers for high level languages, +as well as experinces with the current TVM stack, and NNVM. + +Concretely NNVM has played the role of this high level IR. + +for NNVM which address design flaws of the computation graph IRs +employed by common deep learning frameworks. +We address a few important challenges. First we present a new IR with first +class functions and closures, enabling the design of differentiable +arbitrary control flow constructs. Second we contribute a type system for +Relay which makes the important properties of computations involving tensors +explicit. + +The result is a high-level IR for differentiable comptuation that can +be used to abstract over the details of execution including devices, +parallelism, and distribution. + +We believe Relay will enable researchers and industry to design a +new class of DL framework frontends, differentiable programming languages, +deep probablistic programming languages, thus accelerating the pace of +research in applications. + +================== +Language +================== + +Relay is a purely functional, differentiable intermediate representation. + +================== +IR Reference +================== + +The IR has a global environment which stores the set of definitions, +constants, options, attributes, and provides access to features like +the type inferencer, constant evaluator, and more. + +## Node + +The fundmental unit of the IR is the node, all nodes must have +a type field. + +.. code-block:: python + class Node: + def checked_type() -> Type: + ... + +================== +Variables +================== + +Relay has three variable references local, global, and operators. Our design draws inspiration +from LLVM which differentiates between identifier types. This enables writers of +optimizations to know precisely what an identifier references without needing information +beyond the type of identifier. + +Globals are written with `@`, locals are written with `%`, and operators are written with +no sigil like LLVM's intrinsics. 
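+For example, in the binding below :code:`%ret` and :code:`%local` are local
+variables, :code:`@global` refers to a global definition, and :code:`add` is
+an operator:
+
+.. code-block:: python
+
+    %ret = add(@global(%local), %local)
+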
The distinction between global and local identifiers +makes certain kinds of transformation easier. For example inlining a global definition +requires no analysis, you can write a pass that just directly inlines the defintions. +It also ensures there are no spooky action at a distance, introducing a new identifier +of any type will never introduce an ambiguity to the program. + + +Global Variable +~~~~~~~~~~~~~~~~~~ + +Global identifiers are prefixed by the ::@ sigil. A global identifier always +references a globally visibly definition contained in the environment. You +can write a global identifier as ::@global. + +Local Variable +~~~~~~~~~~~~~~~~~ + +Local identifiers are prefixed by the :code:`%` sigil. A local identifier always +references a parameter, or let bound expression. You can write a local +identifier as :code:`%local`. + + +================== +Global Functions +================== + +A definition consists of a name, type parameter, parameters, and an optional return +type. A Relay definition is similar to a procedure or function in a typical programming +language, but can also be viewed as a named subgraph. + +A definition minimally consists of an identifier :code:`@id`, an empty set of +parameters, and a body expression contained by curly braces + +.. code-block:: python + + def @id() { body } + +A definiton may also contain any number of parameters, for example a +simple function which just adds two tensors + +.. code-block:: python + + def @add(%x, %y) { %x + %y } + +It is also possible for us to annotate explicit types on definitions, for example +we can restrict the above definition to only work on certain types + +.. code-block:: python + + def @add(%x: Tensor, %y: Tensor) -> Tensor { + %x + %y + } + +A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.type.Type`. They represent +the formal parameters of functions and definitions, and are written as :code:`%x : T`. + +They may only appear in function literals, and definitions, and have no relation +to parameters in the machine learning. + +When the type information is omitted we will attempt to infer a most general type +for the users. This property is known as generalization, for a definition without +explicit annotations, we will attempt to assign the most general type. When the +return type is omitted we will infer the return type based on the text of the +program. + +Finally we can directly construct type polymorphic definitions by writing down +a set of type parameters for a definition. To define a polymoprhic identity +function, the function which just returns its argument as so. +:: + def @id(%x: Tensor) { + %x + } + +Notice we can omit the return type, and it will still be inferred. + +*Note: this is not yet implemented.* + +Finally we allo a definition be prefixed by metadata, which adds +extra properties to the definition. + +It is important to be able to annotate metadata that is external to +the computational behavior of a definition. For example we can use +this to add an `inline` or `noinline` attribute which the compiler +can consider when performing inlining. + +For example we can set the attributes for :code:`@id_real`.:: + + + attributes id_real { + inline: true + } + + def id_real(%x:Real) { ret %x } + + +================== +Operators +================== + +A primitive operation that is not defined in the Relay language but provided +externally. Currently we back these operators registrations with the operators +exposed by TVM's TOPI. 
An operator requires a user to provide an implementation +of the operator, its type and various attributes required by Relay subsystems. + +The input methods for Relay programs do not provide a way to describe operators in +Relay, they must be explicitly registered in the global environment via Python or C++. + +Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty +printing Relay programs. Operators are explicitly contained in the program and are uniquely +identifiable by pointer during a run of the Relay compiler. + +Programs +~~~~~~~~ + +Now that we have presented both global functions, and operators we have +everthing in hand to describe a complete Relay program. A Relay program consists of a +registry of operators, one or more functions, as well as the global configuration +stored in the environment. \ No newline at end of file diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst new file mode 100644 index 000000000000..aef53d334010 --- /dev/null +++ b/docs/langref/relay/type_system.rst @@ -0,0 +1,187 @@ +================== +Type System +================== + +We have briefly introduced types while detailing the the expression language +of Relay, but have fully laid out the type system. + +Although the majority of Relay programs are written without type annotations, +Relay is statically typed. + +Static types are useful because they enable efficient layout, memory reuse, and +code generation. They aid in debugging program transformations, but can also +give us the expressivity afforded by more dynamic langauges. + +We are able to omit these type annotations by a process known as type inference. +Type inference is a technique that has its roots in the programming language +community, and can be viewed as a method for generalizing shape inference to +run over arbitrary user programs containing control flow and recursion. + +Static types are useful when performing compiler optimization because they +communicate properties about the data we manipulate, such as runtime shape, +data layout, storage without needing to run the program. + +Most current IRs use "shape inference" to recover Tensor dimensions from the user +provided program. Machine learning users have enjoyed shape inference for +tensors because it allows them to generate performant code without giving up +on the expressivity of the input language. + +Because Relay is intended as an IR we require *some* type information to provide +full inference. We don't believe this to be an issue as many of the IR builder +inferfaces require some type information, or can generate IR based on their own +higher level inferences. + +We view this limited shape inference as a simpler form of type +inference. Instead of relying on an ad-hoc procedure for recovering type +information from a potentially dynamic program, we apply ideas from compiler and IR design. + +Below we briefly dicsuss the different kinds of types in Relay. + +===== +Types +===== + +Relay's type system has a "language of types" which allow us to write down the type of +a Relay program. Below we detail the langauge of types and how we assign them to Relay +programs. + +Type +~~~~ +The base type for all Relay types. All Relay types are sub-classes of this base type. + +See :py:class:`~tvm.relay.type.Type` for its definition and documentation. + +Tensor Type +~~~~~~~~~~ + +A concrete TensorType in Relay, see tvm/relay/type.h for more details. + +This is the type assigned to tensor's with a known dype and shape. 
For +example a tensor of `float32` and `(5, 5)`. + + + +Tensor values in Relay are typed with tensor types. A tensor type is +parametrized by a data type, and shape. The data type must be a base +type as enforced by the kind checking rules described in TODO. + +This restriction importantly means + +The shape may be any valid Relay shape as described in the below +section on shapes. + + +See :py:class:`~tvm.relay.type.TensorType` for its definition and documentation. + +Kind +~~~~ +The kind of a type parameter, represents a variable shape, +base type, type, or dimension. + +This controls what a type parameter is allowed to be instantiated +with. For example one's of kind BaseType can only be `float32`, +`int32`, and so on. + +See :py:class:`~tvm.relay.type.Kind` for its definition and documentation. + +Type Parameter +~~~~~~~~~~~~~~ + +A type parameter used for generic types in Relay, +see tvm/relay/type.h for more details. + +A type parameter represents a type placeholder which will +be filled in later on. This allows the user to write +functions which are generic over types. + +See :py:class:`~tvm.relay.type.TypeParam` for its definition and documentation. + +Type Constriant +~~~~~~~~~~~~~~~ + +Abstract class representing a type constraint, to be elaborated +on in further releases. + +See :py:class:`~tvm.relay.type.TypeConstraint` for its definition and documentation. + +Function Type +~~~~~~~~~~~~~ +A function type in Relay, see tvm/relay/type.h for more details. + +This is the type assigned to functions in Relay. They consist of +a list of type parameters which enable the definition of generic +functions, a set of type constraints which we omit for the time +being, a sequence of argument types, and a return type. + +We informally write them as: +`forall (type_params), (arg_types) -> ret_type where type_constraints` + +See :py:class:`~tvm.relay.type.FuncType` for its definition and documentation. + +Type Relation +~~~~~~~~~~~~~ + +A type relation is the most exotic type system feature in Relay. It allows +users to extend type and shape checking/inference with new rules. We use +type relations to type operators with "hard" types such as broadcasting +operators, or special ones like :code:`flatten`. + +A type relation :code:`R` is a n-ary input, single output relation over +types. To unpack that, it allows us to specify a relationship between +a set of input and output types. + +For example we can define the identity relation to be: + +.. code-block:: prolog + Identity(I, I) :- true + +Or we can define the relation for flatten: + +.. code-block:: prolog + Flatten(Tensor(sh, bt), O) :- + O = Tensor(sh[0], prod(sh[1:])) + +The above examples are written in Prolog-like syntax but currently the relations +must be implemented by users in C++ or Python. + +If we have a relation such as :code:`Broadcast` it becomes possible to type things +such as :code:`elemwise_add`: + +.. code-block:: python + elemwise_add : forall (Lhs : Type) (Rhs : Type), (Lhs, Rhs) -> Broadcast(Lhs, Rhs) + +You might ask why we write the relation in the return type but we use it as a +notational convenience for: + +.. code-block:: python + elemwise_add : forall (Lhs : Type) (Rhs : Type) (Out : Type), Broadcast(Lhs, Rhs, Out) => (Lhs, Rhs) -> Out + +That is the user may pick the type of the :code:`Lhs`, :code:`Rhs`, and :code:`Out` as long as we can +show :code:`Broadcast(Lhs, Rhs, Out)` holds. + +See :py:class:`~tvm.relay.type.TypeRelation` for its definition and documentation. 
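+
+To make the idea concrete, the sketch below shows in plain Python the kind of
+computation a broadcast relation performs. It is an illustration of the
+concept only, not the interface used to register relations with Relay; the
+function name and signature here are purely illustrative.
+
+.. code-block:: python
+
+    from itertools import zip_longest
+
+    def broadcast_rel(lhs_shape, rhs_shape):
+        """Illustrative only: compute the broadcast of two shapes.
+
+        For example (5, 1, 3) and (4, 3) broadcast to (5, 4, 3).
+        """
+        out = []
+        # Walk the dimensions right to left, NumPy style; the shorter
+        # shape is implicitly padded with 1s on the left.
+        for l, r in zip_longest(reversed(lhs_shape), reversed(rhs_shape),
+                                fillvalue=1):
+            if l == r or l == 1 or r == 1:
+                out.append(max(l, r))
+            else:
+                raise TypeError("shapes do not broadcast")
+        return tuple(reversed(out))
+
+    broadcast_rel((5, 1, 3), (4, 3))   # -> (5, 4, 3)
+
+Given such a procedure the type checker can solve for :code:`Out` in
+:code:`Broadcast(Lhs, Rhs, Out)` once the shapes of :code:`Lhs` and
+:code:`Rhs` are known.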
+ +Type Call +~~~~~~~~~ + +Apply a type relation to a set of input arguments, at the present momen the type +call node represents the application of a :py:class:`~tvm.relay.type.TypeRelation` +to a set of input arguments. The result of type application is the output variable +of the type relation. + +See :py:class:`~tvm.relay.type.TypeCall` for its definition and documentation. + +Incomplete Type +~~~~~~~~~~~~~~~ + +A type, or portion of a type which is not known yet. Only used during type inference. + +.. note:: Known as a "type variable" in the type checking literature. + +See :py:class:`~tvm.relay.type.IncompleteType` for its definition and documentation. + + + + + + From 5c7fa5e062116d0e7ecab14b3b7c57415fafb071 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 20 Sep 2018 23:37:41 -0700 Subject: [PATCH 02/28] Add docs for Python API from previous branch. --- docs/api/python/relay/base.rst | 9 +++++++ docs/api/python/relay/env.rst | 6 +++++ docs/api/python/relay/expr.rst | 36 ++++++++++++++++++++++++++++ docs/api/python/relay/index.rst | 20 ++++++++++++++++ docs/api/python/relay/ir_builder.rst | 6 +++++ docs/api/python/relay/ir_pass.rst | 4 ++++ docs/api/python/relay/op.rst | 3 +++ docs/api/python/relay/to_tvm.rst | 4 ++++ docs/api/python/relay/type.rst | 27 +++++++++++++++++++++ 9 files changed, 115 insertions(+) create mode 100644 docs/api/python/relay/base.rst create mode 100644 docs/api/python/relay/env.rst create mode 100644 docs/api/python/relay/expr.rst create mode 100644 docs/api/python/relay/index.rst create mode 100644 docs/api/python/relay/ir_builder.rst create mode 100644 docs/api/python/relay/ir_pass.rst create mode 100644 docs/api/python/relay/op.rst create mode 100644 docs/api/python/relay/to_tvm.rst create mode 100644 docs/api/python/relay/type.rst diff --git a/docs/api/python/relay/base.rst b/docs/api/python/relay/base.rst new file mode 100644 index 000000000000..f0cec295ee6b --- /dev/null +++ b/docs/api/python/relay/base.rst @@ -0,0 +1,9 @@ +tvm.relay.base +----------- +.. automodule:: tvm.relay.base + +.. autoclass:: tvm.relay.base.NodeBase + :members: + +.. autoclass:: tvm.relay.base.Span + :members: \ No newline at end of file diff --git a/docs/api/python/relay/env.rst b/docs/api/python/relay/env.rst new file mode 100644 index 000000000000..eca7312d5bbb --- /dev/null +++ b/docs/api/python/relay/env.rst @@ -0,0 +1,6 @@ +tvm.relay.env +----------- +.. automodule:: tvm.relay.env + +.. autoclass:: tvm.relay.env.Environment + :members: \ No newline at end of file diff --git a/docs/api/python/relay/expr.rst b/docs/api/python/relay/expr.rst new file mode 100644 index 000000000000..cd0cb5c308c4 --- /dev/null +++ b/docs/api/python/relay/expr.rst @@ -0,0 +1,36 @@ +tvm.relay.expr +----------- +.. automodule:: tvm.relay.expr + +.. autoclass:: tvm.relay.expr.ExprBuilder + :members: + +.. autoclass:: tvm.relay.expr.Expr + :members: + +.. autoclass:: tvm.relay.expr.Constant + :members: + +.. autoclass:: tvm.relay.expr.Tuple + :members: + +.. autoclass:: tvm.relay.expr.LocalVar + :members: + +.. autoclass:: tvm.relay.expr.GlobalVar + :members: + +.. autoclass:: tvm.relay.expr.Param + :members: + +.. autoclass:: tvm.relay.expr.Function + :members: + +.. autoclass:: tvm.relay.expr.Call + :members: + +.. autoclass:: tvm.relay.expr.Let + :members: + +.. 
autoclass:: tvm.relay.expr.If + :members: \ No newline at end of file diff --git a/docs/api/python/relay/index.rst b/docs/api/python/relay/index.rst new file mode 100644 index 000000000000..231d49df0e6d --- /dev/null +++ b/docs/api/python/relay/index.rst @@ -0,0 +1,20 @@ +Relay API +========= + +This document contains the Python API to the Relay frontend, optimizer, and +compiler toolchain. + +Relay is the second generation high level intermediate representation for the TVM +compiler stack. + +.. toctree:: + :maxdepth: 2 + + base + env + expr + ir_builder + ir_pass + op + to_tvm + type diff --git a/docs/api/python/relay/ir_builder.rst b/docs/api/python/relay/ir_builder.rst new file mode 100644 index 000000000000..b12e3cc6cdd1 --- /dev/null +++ b/docs/api/python/relay/ir_builder.rst @@ -0,0 +1,6 @@ +tvm.relay.ir_builder +----------- +.. automodule:: tvm.relay.ir_builder + +.. autoclass:: tvm.relay.ir_builder.IRBuilder + :members: \ No newline at end of file diff --git a/docs/api/python/relay/ir_pass.rst b/docs/api/python/relay/ir_pass.rst new file mode 100644 index 000000000000..a7a9ad6251bc --- /dev/null +++ b/docs/api/python/relay/ir_pass.rst @@ -0,0 +1,4 @@ +tvm.relay.ir_pass +----------- +.. automodule:: tvm.relay.ir_pass + :members: \ No newline at end of file diff --git a/docs/api/python/relay/op.rst b/docs/api/python/relay/op.rst new file mode 100644 index 000000000000..fb8e9ce774c2 --- /dev/null +++ b/docs/api/python/relay/op.rst @@ -0,0 +1,3 @@ +tvm.relay.op +----------- +.. automodule:: tvm.relay.op \ No newline at end of file diff --git a/docs/api/python/relay/to_tvm.rst b/docs/api/python/relay/to_tvm.rst new file mode 100644 index 000000000000..fe27f0ec582e --- /dev/null +++ b/docs/api/python/relay/to_tvm.rst @@ -0,0 +1,4 @@ +tvm.relay.to_tvm +----------- +.. automodule:: tvm.relay.to_tvm + :members: \ No newline at end of file diff --git a/docs/api/python/relay/type.rst b/docs/api/python/relay/type.rst new file mode 100644 index 000000000000..d357df8f08ac --- /dev/null +++ b/docs/api/python/relay/type.rst @@ -0,0 +1,27 @@ +tvm.relay.type +----------- +.. automodule:: tvm.relay.type + +.. autoclass:: tvm.relay.type.Type + :members: + +.. autoclass:: tvm.relay.type.TensorType + :members: + +.. autoclass:: tvm.relay.type.Kind + :members: + +.. autoclass:: tvm.relay.type.TypeParam + :members: + +.. autoclass:: tvm.relay.type.TypeConstraint + :members: + +.. autoclass:: tvm.relay.type.FuncType + :members: + +.. autoclass:: tvm.relay.type.TypeCall + :members: + +.. 
autoclass:: tvm.relay.type.IncompleteType + :members: \ No newline at end of file From 4bf81e41417b437c558f3a8b7ddfb0f5c0a9b8a3 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 20 Sep 2018 23:38:25 -0700 Subject: [PATCH 03/28] Link to docs from index.rst --- docs/api/python/index.rst | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/api/python/index.rst b/docs/api/python/index.rst index 59bd1795b7ec..ddad9d10f8f9 100644 --- a/docs/api/python/index.rst +++ b/docs/api/python/index.rst @@ -24,3 +24,4 @@ Python API vta/index nnvm/index hybrid + relay/index From 3eb63305a6c5120e3ce38a2b686eef6ffcf40b1b Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 20 Sep 2018 23:45:49 -0700 Subject: [PATCH 04/28] Add tutorial on FMA --- tutorials/relay/implement_fma_transform.py | 141 +++++++++++++++++++++ 1 file changed, 141 insertions(+) create mode 100644 tutorials/relay/implement_fma_transform.py diff --git a/tutorials/relay/implement_fma_transform.py b/tutorials/relay/implement_fma_transform.py new file mode 100644 index 000000000000..8c04e70aa846 --- /dev/null +++ b/tutorials/relay/implement_fma_transform.py @@ -0,0 +1,141 @@ +"""How to use Relay to implement a simple two-operator fusion pass. +================================== +**Author**: `Jared Roesch `_ + +In this tutorial, we will demonstrate how to write a fusion pass for +the Relay IR. We demonstrate many Relay features including defining a +new operator, a program transform, the NNVM compatibility layer, +and executing the original and transformed programs on the Relay +evaluator and TVM runtime system. +""" + +################################################################ +# Introduction +# ------------------------- +# +# In this tutorial, we will demonstrate how to write a fusion pass for +# the Relay IR. We demonstrate many Relay features including defining a +# new operator, a program transform, the NNVM compatibility layer, +# and executing the original and transformed programs on the Relay +# evaluator and TVM runtime system. + +from typing import Any, Dict + +import numpy as np +import tvm +import topi + +from relay import ir, make as mk +from relay.ir import OperatorId +from relay.opt import ItemVisitor, ExprVisitor +from relay.frontend.nnvm import Variable, symbol +from relay.frontend.nnvm import compiler +from relay.frontend.global_env import get_env +from relay.operators.register import func_ty_to_placeholders, register_op +from relay.eval import defn_to_pyfunc +from relay.tyck import check_expr + +class ExprAtVisitor(ExprVisitor): + """A demo visitor which adds a new traversal strategy.""" + expr_map: Dict[ir.LocalId, ir.Expr] + + def __init__(self): + self.expr_map = {} + + def expr_at(self,id: ir.LocalId) -> ir.Expr: + try: + return self.expr_map[id] + except KeyError: + return id + + def visit_let(self, let: ir.Let) -> ir.Expr: + self.expr_map[let.id] = let.value + return super().visit_let(let) + +# let x = 1 + 1; +# ... x will map to 1 + 1 + +class FuseTwo(ExprAtVisitor): + """Rewrite b(a(x, y), z) into ab(x, y, z). """ + def __init__(self, a: OperatorId, b: OperatorId, a_and_b: OperatorId) -> None: + self.a = a + self.b = b + self.a_and_b = a_and_b + super().__init__() + + def visit_call(self, call: ir.Call) -> ir.Expr: + func = call.fn + if func == self.b: + assert len(call.args) == 2 # An assumption of this fusion code. 
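+            # Resolve the call's arguments through any let bindings recorded by
+            # visit_let, so that a(x, y) is recognized even when its result was
+            # first bound to a local variable and only that variable is passed to b.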
+ arg0 = self.expr_at(call.args[0]) + arg1 = self.expr_at(call.args[1]) + if isinstance(arg0, ir.Call) and arg0.fn == self.a: + new_call = mk.Call(self.a_and_b, arg0.args[:] + [arg1]) + elif isinstance(arg1, ir.Call) and arg1.fn == self.a: + new_call = mk.Call(self.a_and_b, arg1.args[:] + [arg0]) + else: + new_call = super().visit_call(call) + + return new_call + else: + return super().visit_call(call) + +def fma_compile(op_name: str, func_ty: ir.Type, attrs: ir.Attributes=None) -> Any: + Inputs, ret_ty = func_ty_to_placeholders(func_ty) + x, y, z = Inputs + Output = topi.multiply(topi.add(x, y), z) + # this is not a python function call, but builds an AST + schedule = tvm.create_schedule(Output.op) + return [schedule, Inputs + [Output]] + + +def register_fma(env: Any) -> None: + """Register TOPI's elementwise broadcast addition for the `+` operator.""" + shape = mk.TypeParam("s", ir.Kind.Shape) + bt = mk.TypeParam("bt", ir.Kind.BaseType) + in_out_type = mk.TensorType(bt, shape) + fma_type = mk.TypeQuantifier(bt, mk.TypeQuantifier(shape, mk.TypeArrow([in_out_type, in_out_type, in_out_type], in_out_type))) + # forall (bt: BaseTYpe) (s : Shape), Tensor[bt, s] -> Tensor[bt, s] -> Tensor[bt, s] + # TODO: no reverse mode + register_op(env, 'fma', fma_type, compiler=fma_compile) + +# Get the global environment for demo purposes. +env = get_env() + +register_fma(env) + +# A small helper which applies just our transform to the Relay expression. +def transform(e): + fuse = FuseTwo(env.add_id(), env.mul_id(), env.operator_id('fma')) + e = fuse.visit(e) + # Now let's use the type checker to make sure we didn't make a mistake. + check_expr(env, e) + return e + +# We will use NNVM frontend. +x = Variable('x') +y = Variable('y') +z = x * (x + y) + +relay_func = compiler.to_relay(z) + +print(f"Relay Function:\n{compiler.pp(relay_func)}") + +xform_func = transform(relay_func) + +print(f"Transformed Function:\n{compiler.pp(xform_func)}") + +# Use the evaluator. +norm = defn_to_pyfunc(env, relay_func) +xform = defn_to_pyfunc(env, xform_func) + +x = np.random.uniform(size=(10, 5, 10)).astype('float32') +y = np.random.uniform(size=(10, 5, 10)).astype('float32') + +norm_out = norm(x, y).asnumpy() +xform_out = xform(x, y).asnumpy() + +np.testing.assert_allclose(norm_out, xform_out) + +# Use the TVM runtime. + From 28ac7b5a9b99f4c44c701e27b04a004324797962 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 20 Sep 2018 23:58:45 -0700 Subject: [PATCH 05/28] First clean up pass after restoring --- docs/api/python/relay/base.rst | 2 +- docs/api/python/relay/env.rst | 2 +- docs/api/python/relay/expr.rst | 2 +- docs/api/python/relay/ir_builder.rst | 2 +- docs/api/python/relay/ir_pass.rst | 2 +- docs/api/python/relay/op.rst | 2 +- docs/api/python/relay/to_tvm.rst | 4 --- docs/api/python/relay/{type.rst => ty.rst} | 2 +- docs/langref/relay/expressions.rst | 29 +++++++-------- docs/langref/relay/index.rst | 2 +- docs/langref/relay/intro.rst | 42 +++++++++++----------- docs/langref/relay/type_system.rst | 26 +++++++------- 12 files changed, 58 insertions(+), 59 deletions(-) delete mode 100644 docs/api/python/relay/to_tvm.rst rename docs/api/python/relay/{type.rst => ty.rst} (96%) diff --git a/docs/api/python/relay/base.rst b/docs/api/python/relay/base.rst index f0cec295ee6b..869a402d7f8e 100644 --- a/docs/api/python/relay/base.rst +++ b/docs/api/python/relay/base.rst @@ -1,5 +1,5 @@ tvm.relay.base ------------ +-------------- .. automodule:: tvm.relay.base .. 
autoclass:: tvm.relay.base.NodeBase diff --git a/docs/api/python/relay/env.rst b/docs/api/python/relay/env.rst index eca7312d5bbb..e54485700058 100644 --- a/docs/api/python/relay/env.rst +++ b/docs/api/python/relay/env.rst @@ -1,5 +1,5 @@ tvm.relay.env ------------ +------------- .. automodule:: tvm.relay.env .. autoclass:: tvm.relay.env.Environment diff --git a/docs/api/python/relay/expr.rst b/docs/api/python/relay/expr.rst index cd0cb5c308c4..100282f4b344 100644 --- a/docs/api/python/relay/expr.rst +++ b/docs/api/python/relay/expr.rst @@ -1,5 +1,5 @@ tvm.relay.expr ------------ +-------------- .. automodule:: tvm.relay.expr .. autoclass:: tvm.relay.expr.ExprBuilder diff --git a/docs/api/python/relay/ir_builder.rst b/docs/api/python/relay/ir_builder.rst index b12e3cc6cdd1..0e1c3d87a3e5 100644 --- a/docs/api/python/relay/ir_builder.rst +++ b/docs/api/python/relay/ir_builder.rst @@ -1,5 +1,5 @@ tvm.relay.ir_builder ------------ +-------------------- .. automodule:: tvm.relay.ir_builder .. autoclass:: tvm.relay.ir_builder.IRBuilder diff --git a/docs/api/python/relay/ir_pass.rst b/docs/api/python/relay/ir_pass.rst index a7a9ad6251bc..d02ef4d94b0a 100644 --- a/docs/api/python/relay/ir_pass.rst +++ b/docs/api/python/relay/ir_pass.rst @@ -1,4 +1,4 @@ tvm.relay.ir_pass ------------ +----------------- .. automodule:: tvm.relay.ir_pass :members: \ No newline at end of file diff --git a/docs/api/python/relay/op.rst b/docs/api/python/relay/op.rst index fb8e9ce774c2..8db3c3e75aba 100644 --- a/docs/api/python/relay/op.rst +++ b/docs/api/python/relay/op.rst @@ -1,3 +1,3 @@ tvm.relay.op ------------ +------------ .. automodule:: tvm.relay.op \ No newline at end of file diff --git a/docs/api/python/relay/to_tvm.rst b/docs/api/python/relay/to_tvm.rst deleted file mode 100644 index fe27f0ec582e..000000000000 --- a/docs/api/python/relay/to_tvm.rst +++ /dev/null @@ -1,4 +0,0 @@ -tvm.relay.to_tvm ------------ -.. automodule:: tvm.relay.to_tvm - :members: \ No newline at end of file diff --git a/docs/api/python/relay/type.rst b/docs/api/python/relay/ty.rst similarity index 96% rename from docs/api/python/relay/type.rst rename to docs/api/python/relay/ty.rst index d357df8f08ac..1a8e8af57fde 100644 --- a/docs/api/python/relay/type.rst +++ b/docs/api/python/relay/ty.rst @@ -1,5 +1,5 @@ tvm.relay.type ------------ +-------------- .. automodule:: tvm.relay.type .. autoclass:: tvm.relay.type.Type diff --git a/docs/langref/relay/expressions.rst b/docs/langref/relay/expressions.rst index 3be5dadd3fdb..92c3ffdc226c 100644 --- a/docs/langref/relay/expressions.rst +++ b/docs/langref/relay/expressions.rst @@ -1,18 +1,18 @@ -================== +=========== Expressions -================== +=========== -The Relay IR is a pure, expression oriented language, with a dataflow fragment -and structured control flow. Although Relay's representation is a tree, it is +The Relay IR is a pure, expression oriented language, with a dataflow fragment +and structured control flow. Although Relay's representation is a tree, it is possible to view the dataflow fragments as graph for purposes of writing and expressing transformations. The below sections make an attempt to clearly split the dataflow fragment from the control fragment. 
-================== +==================== Dataflow Expressions -================== +==================== First we will cover the set of nodes which do not involve control flow, this fragment of the language is semantically equivalent to pure @@ -23,15 +23,15 @@ Constants Relay programs can contain constant Tensor values. This node represents a constant tensor value (values are either Tensors, Products, or Closures in Relay). -The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize +The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize TVM operators for constant evaluation. See :py:class:`~tvm.relay.expr.Constant` for its definition and documentation. Tuple -~~~~~~~~~~~~~~~ +~~~~~ -We support tuple constructors; the tuple node builds a finite (i.e statically known size) sequence of +We support tuple constructors; the tuple node builds a finite (i.e statically known size) sequence of heterogenous data. These tuples match closely to Python's and enable efficient projection of their m embers due to their fixed length. @@ -65,13 +65,14 @@ binding to define locally recursive functions. 0 } else { x * fact(x - 1) + } }; fact(10) See :py:class:`~tvm.relay.expr.Function` for its definition and documentation. Variables -~~~~~~~~~~~ +~~~~~~~~~ Both global variables, and local variables, are valid expressions, one may use them anywhere an expression may appear. @@ -81,7 +82,7 @@ For example the below fragment of code is a valid expression. .. code-block:: python %ret = @global(op_name, %local) -See :py:class:`~tvm.relay.expr.LocalVar` and :py:class:`~tvm.expr.GlobalVar` for its definition +See :py:class:`~tvm.relay.expr.LocalVar` and :py:class:`~tvm.expr.GlobalVar` for its definition and documentation. Let Binding @@ -89,7 +90,7 @@ Let Binding An immutable variable binding, allows the user to bind an expression to a name. A let binding contains a local variable, -an optional type annotation, a value, and body expression +an optional type annotation, a value, and body expression which may reference the bound identifier. We will first introduce a single binding with no type @@ -153,8 +154,8 @@ tensor of booleans (:code:`Tensor[(), bool]`). .. code-block:: python if (sum(equal(t, u))) { - jreturn x: - } else { + return x: + } else { return y; } diff --git a/docs/langref/relay/index.rst b/docs/langref/relay/index.rst index 617e745acdfc..c61cbc0fc97b 100644 --- a/docs/langref/relay/index.rst +++ b/docs/langref/relay/index.rst @@ -4,7 +4,7 @@ Relay Language Reference This document is a work in progress language reference describing Relay, TVM's high level intermediate representation. The name is an allusion to interneurons which are often referred to as intermediate, -or relay neurons. +or Relay neurons. We will continually iterate on this document as we evolve the new IR and update accordingly. diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index 4b568c675358..875054a96ae9 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -4,19 +4,21 @@ Introduction Relay is differentiable programming language with support for closures, control-flow, and recursion. It has an advanced -static type system specifically designed for programs written by machine -learning practioniers and researchers. +static type system specifically designed for programs written +by machine learning practioniers and researchers. 
-Relay is intended to replace computation graph based +Relay is intended to replace the computation graph based intermediate representations currently employed by deep learning frameworks and compilers. The deep learning community -has organically evolved from a representation that was useful -for the limited form of computation originally desired, i.e -a DAG of primitive functions. +has organically evolved a representation that was useful +for the form of computation originally desired, i.e +a directed acyclic graph of primitive functions. -This may be a useful way to describe computation, but is limited once -you want to generalize to dynamic models. +This may be a useful way to describe early ML models, but is +limited onceyou want to generalize to dynamic models. +At the same time computation graphs have been over loaded +purpose as both a compile-time and run-time data structure. The conflation of the description of a computation, the representation used for optimizing it, and the data structure used to execute it uncessarily hampers many of the goals @@ -78,8 +80,8 @@ Variables ================== Relay has three variable references local, global, and operators. Our design draws inspiration -from LLVM which differentiates between identifier types. This enables writers of -optimizations to know precisely what an identifier references without needing information +from LLVM which differentiates between identifier types. This enables writers of +optimizations to know precisely what an identifier references without needing information beyond the type of identifier. Globals are written with `@`, locals are written with `%`, and operators are written with @@ -105,9 +107,9 @@ references a parameter, or let bound expression. You can write a local identifier as :code:`%local`. -================== +================ Global Functions -================== +================ A definition consists of a name, type parameter, parameters, and an optional return type. A Relay definition is similar to a procedure or function in a typical programming @@ -134,7 +136,7 @@ we can restrict the above definition to only work on certain types def @add(%x: Tensor, %y: Tensor) -> Tensor { %x + %y - } + } A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.type.Type`. They represent the formal parameters of functions and definitions, and are written as :code:`%x : T`. @@ -178,26 +180,26 @@ For example we can set the attributes for :code:`@id_real`.:: def id_real(%x:Real) { ret %x } -================== +========= Operators -================== +========= A primitive operation that is not defined in the Relay language but provided externally. Currently we back these operators registrations with the operators -exposed by TVM's TOPI. An operator requires a user to provide an implementation +exposed by TVM's TOPI. An operator requires a user to provide an implementation of the operator, its type and various attributes required by Relay subsystems. The input methods for Relay programs do not provide a way to describe operators in Relay, they must be explicitly registered in the global environment via Python or C++. -Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty -printing Relay programs. Operators are explicitly contained in the program and are uniquely +Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty +printing Relay programs. 
Operators are explicitly contained in the program and are uniquely identifiable by pointer during a run of the Relay compiler. Programs ~~~~~~~~ Now that we have presented both global functions, and operators we have -everthing in hand to describe a complete Relay program. A Relay program consists of a -registry of operators, one or more functions, as well as the global configuration +everthing in hand to describe a complete Relay program. A Relay program consists of a +registry of operators, one or more functions, as well as the global configuration stored in the environment. \ No newline at end of file diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index aef53d334010..b51fc6203386 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -1,24 +1,24 @@ -================== +=========== Type System -================== +=========== We have briefly introduced types while detailing the the expression language of Relay, but have fully laid out the type system. -Although the majority of Relay programs are written without type annotations, +Although the majority of Relay programs are written without type annotations, Relay is statically typed. -Static types are useful because they enable efficient layout, memory reuse, and +Static types are useful because they enable efficient layout, memory reuse, and code generation. They aid in debugging program transformations, but can also -give us the expressivity afforded by more dynamic langauges. +give us the expressivity afforded by more dynamic langauges. We are able to omit these type annotations by a process known as type inference. Type inference is a technique that has its roots in the programming language community, and can be viewed as a method for generalizing shape inference to run over arbitrary user programs containing control flow and recursion. -Static types are useful when performing compiler optimization because they -communicate properties about the data we manipulate, such as runtime shape, +Static types are useful when performing compiler optimization because they +communicate properties about the data we manipulate, such as runtime shape, data layout, storage without needing to run the program. Most current IRs use "shape inference" to recover Tensor dimensions from the user @@ -52,7 +52,7 @@ The base type for all Relay types. All Relay types are sub-classes of this base See :py:class:`~tvm.relay.type.Type` for its definition and documentation. Tensor Type -~~~~~~~~~~ +~~~~~~~~~~~ A concrete TensorType in Relay, see tvm/relay/type.h for more details. @@ -79,9 +79,9 @@ The kind of a type parameter, represents a variable shape, base type, type, or dimension. This controls what a type parameter is allowed to be instantiated -with. For example one's of kind BaseType can only be `float32`, +with. For example one's of kind BaseType can only be `float32`, `int32`, and so on. - + See :py:class:`~tvm.relay.type.Kind` for its definition and documentation. Type Parameter @@ -121,7 +121,7 @@ See :py:class:`~tvm.relay.type.FuncType` for its definition and documentation. Type Relation ~~~~~~~~~~~~~ -A type relation is the most exotic type system feature in Relay. It allows +A type relation is the most exotic type system feature in Relay. It allows users to extend type and shape checking/inference with new rules. We use type relations to type operators with "hard" types such as broadcasting operators, or special ones like :code:`flatten`. 
@@ -150,7 +150,7 @@ such as :code:`elemwise_add`: .. code-block:: python elemwise_add : forall (Lhs : Type) (Rhs : Type), (Lhs, Rhs) -> Broadcast(Lhs, Rhs) -You might ask why we write the relation in the return type but we use it as a +You might ask why we write the relation in the return type but we use it as a notational convenience for: .. code-block:: python @@ -164,7 +164,7 @@ See :py:class:`~tvm.relay.type.TypeRelation` for its definition and documentatio Type Call ~~~~~~~~~ -Apply a type relation to a set of input arguments, at the present momen the type +Apply a type relation to a set of input arguments, at the present momen the type call node represents the application of a :py:class:`~tvm.relay.type.TypeRelation` to a set of input arguments. The result of type application is the output variable of the type relation. From f98d7933ffaa01355b2ffe5574af40a6c2804499 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Mon, 15 Oct 2018 14:58:25 -0700 Subject: [PATCH 06/28] Hold tutorial for future PR --- tutorials/relay/implement_fma_transform.py | 141 --------------------- 1 file changed, 141 deletions(-) delete mode 100644 tutorials/relay/implement_fma_transform.py diff --git a/tutorials/relay/implement_fma_transform.py b/tutorials/relay/implement_fma_transform.py deleted file mode 100644 index 8c04e70aa846..000000000000 --- a/tutorials/relay/implement_fma_transform.py +++ /dev/null @@ -1,141 +0,0 @@ -"""How to use Relay to implement a simple two-operator fusion pass. -================================== -**Author**: `Jared Roesch `_ - -In this tutorial, we will demonstrate how to write a fusion pass for -the Relay IR. We demonstrate many Relay features including defining a -new operator, a program transform, the NNVM compatibility layer, -and executing the original and transformed programs on the Relay -evaluator and TVM runtime system. -""" - -################################################################ -# Introduction -# ------------------------- -# -# In this tutorial, we will demonstrate how to write a fusion pass for -# the Relay IR. We demonstrate many Relay features including defining a -# new operator, a program transform, the NNVM compatibility layer, -# and executing the original and transformed programs on the Relay -# evaluator and TVM runtime system. - -from typing import Any, Dict - -import numpy as np -import tvm -import topi - -from relay import ir, make as mk -from relay.ir import OperatorId -from relay.opt import ItemVisitor, ExprVisitor -from relay.frontend.nnvm import Variable, symbol -from relay.frontend.nnvm import compiler -from relay.frontend.global_env import get_env -from relay.operators.register import func_ty_to_placeholders, register_op -from relay.eval import defn_to_pyfunc -from relay.tyck import check_expr - -class ExprAtVisitor(ExprVisitor): - """A demo visitor which adds a new traversal strategy.""" - expr_map: Dict[ir.LocalId, ir.Expr] - - def __init__(self): - self.expr_map = {} - - def expr_at(self,id: ir.LocalId) -> ir.Expr: - try: - return self.expr_map[id] - except KeyError: - return id - - def visit_let(self, let: ir.Let) -> ir.Expr: - self.expr_map[let.id] = let.value - return super().visit_let(let) - -# let x = 1 + 1; -# ... x will map to 1 + 1 - -class FuseTwo(ExprAtVisitor): - """Rewrite b(a(x, y), z) into ab(x, y, z). 
""" - def __init__(self, a: OperatorId, b: OperatorId, a_and_b: OperatorId) -> None: - self.a = a - self.b = b - self.a_and_b = a_and_b - super().__init__() - - def visit_call(self, call: ir.Call) -> ir.Expr: - func = call.fn - if func == self.b: - assert len(call.args) == 2 # An assumption of this fusion code. - arg0 = self.expr_at(call.args[0]) - arg1 = self.expr_at(call.args[1]) - if isinstance(arg0, ir.Call) and arg0.fn == self.a: - new_call = mk.Call(self.a_and_b, arg0.args[:] + [arg1]) - elif isinstance(arg1, ir.Call) and arg1.fn == self.a: - new_call = mk.Call(self.a_and_b, arg1.args[:] + [arg0]) - else: - new_call = super().visit_call(call) - - return new_call - else: - return super().visit_call(call) - -def fma_compile(op_name: str, func_ty: ir.Type, attrs: ir.Attributes=None) -> Any: - Inputs, ret_ty = func_ty_to_placeholders(func_ty) - x, y, z = Inputs - Output = topi.multiply(topi.add(x, y), z) - # this is not a python function call, but builds an AST - schedule = tvm.create_schedule(Output.op) - return [schedule, Inputs + [Output]] - - -def register_fma(env: Any) -> None: - """Register TOPI's elementwise broadcast addition for the `+` operator.""" - shape = mk.TypeParam("s", ir.Kind.Shape) - bt = mk.TypeParam("bt", ir.Kind.BaseType) - in_out_type = mk.TensorType(bt, shape) - fma_type = mk.TypeQuantifier(bt, mk.TypeQuantifier(shape, mk.TypeArrow([in_out_type, in_out_type, in_out_type], in_out_type))) - # forall (bt: BaseTYpe) (s : Shape), Tensor[bt, s] -> Tensor[bt, s] -> Tensor[bt, s] - # TODO: no reverse mode - register_op(env, 'fma', fma_type, compiler=fma_compile) - -# Get the global environment for demo purposes. -env = get_env() - -register_fma(env) - -# A small helper which applies just our transform to the Relay expression. -def transform(e): - fuse = FuseTwo(env.add_id(), env.mul_id(), env.operator_id('fma')) - e = fuse.visit(e) - # Now let's use the type checker to make sure we didn't make a mistake. - check_expr(env, e) - return e - -# We will use NNVM frontend. -x = Variable('x') -y = Variable('y') -z = x * (x + y) - -relay_func = compiler.to_relay(z) - -print(f"Relay Function:\n{compiler.pp(relay_func)}") - -xform_func = transform(relay_func) - -print(f"Transformed Function:\n{compiler.pp(xform_func)}") - -# Use the evaluator. -norm = defn_to_pyfunc(env, relay_func) -xform = defn_to_pyfunc(env, xform_func) - -x = np.random.uniform(size=(10, 5, 10)).astype('float32') -y = np.random.uniform(size=(10, 5, 10)).astype('float32') - -norm_out = norm(x, y).asnumpy() -xform_out = xform(x, y).asnumpy() - -np.testing.assert_allclose(norm_out, xform_out) - -# Use the TVM runtime. - From c6054156bdb882bd440124e26b7ea61c67d33298 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Tue, 16 Oct 2018 17:50:39 -0700 Subject: [PATCH 07/28] Pass one of addressing feedback --- docs/api/python/relay/index.rst | 4 +- docs/langref/relay/expressions.rst | 79 +++++++++++++++--------------- 2 files changed, 42 insertions(+), 41 deletions(-) diff --git a/docs/api/python/relay/index.rst b/docs/api/python/relay/index.rst index 231d49df0e6d..7e9c3cb50f38 100644 --- a/docs/api/python/relay/index.rst +++ b/docs/api/python/relay/index.rst @@ -1,10 +1,10 @@ Relay API ========= -This document contains the Python API to the Relay frontend, optimizer, and +This document contains the Python API for the Relay frontend, optimizer, and compiler toolchain. 
-Relay is the second generation high level intermediate representation for the TVM +Relay is the second-generation, high-level intermediate representation (IR) for the TVM compiler stack. .. toctree:: diff --git a/docs/langref/relay/expressions.rst b/docs/langref/relay/expressions.rst index 92c3ffdc226c..3275be29d163 100644 --- a/docs/langref/relay/expressions.rst +++ b/docs/langref/relay/expressions.rst @@ -2,27 +2,28 @@ Expressions =========== -The Relay IR is a pure, expression oriented language, with a dataflow fragment -and structured control flow. Although Relay's representation is a tree, it is -possible to view the dataflow fragments as graph for purposes of writing and +The Relay IR is a pure, expression-oriented language with distinct dataflow +and control flow language fragments. Although Relay's representation is an abstract syntax +tree, it is possible to view the dataflow fragment as graph for purposes of writing and expressing transformations. -The below sections make an attempt to clearly split the dataflow -fragment from the control fragment. +The below sections make an attempt to clearly split the expressions which +are pure dataflow (equivalent to traditional computation graphs) from +the extended expressions which contain control flow. ==================== Dataflow Expressions ==================== -First we will cover the set of nodes which do not involve control flow, -this fragment of the language is semantically equivalent to pure -computation graphs without control flow. +First we will cover the set of expressions which do not involve control flow; +this fragment of the language is semantically equivalent to pure computation graphs +without control flow. Constants ~~~~~~~~~ Relay programs can contain constant Tensor values. This node represents -a constant tensor value (values are either Tensors, Products, or Closures in Relay). +a constant tensor value (see :py:mod:~tvm.relay.Value for more details). The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize TVM operators for constant evaluation. @@ -32,8 +33,8 @@ Tuple ~~~~~ We support tuple constructors; the tuple node builds a finite (i.e statically known size) sequence of -heterogenous data. These tuples match closely to Python's and enable efficient projection of their m -embers due to their fixed length. +heterogeneous data. These tuples match closely to Python's and enable efficient projection of their +members due to their fixed length. .. code-block:: python @@ -46,7 +47,7 @@ See :py:class:`~tvm.relay.expr.Tuple` for its definition and documentation. Function ~~~~~~~~ -A function node represents a function, it contains a seqeuence of +A function node represents a function; it contains a sequence of parameters, a return type, and a body. .. code-block:: python @@ -54,8 +55,8 @@ parameters, a return type, and a body. fun (x : Float, y: Float) -> Float { x + y } Functions are first class in Relay, and can be used in any expression -position. Functions are the same as global functions, but do not have -an explicit name. You can use a function in conjunction with a let +position. Functions expressions are the same as global functions, but do not +have a globally unique name. You can use a function in conjunction with a let binding to define locally recursive functions. .. code-block:: python @@ -74,7 +75,7 @@ See :py:class:`~tvm.relay.expr.Function` for its definition and documentation. 
Variables ~~~~~~~~~ -Both global variables, and local variables, are valid expressions, one may use them +Both global variables, and local variables are valid expressions, one may use them anywhere an expression may appear. For example the below fragment of code is a valid expression. @@ -93,8 +94,8 @@ expression to a name. A let binding contains a local variable, an optional type annotation, a value, and body expression which may reference the bound identifier. -We will first introduce a single binding with no type -anntoations: +We will first introduce a single binding without +type annotations: .. code-block:: python let %x = %a + %b; @@ -103,25 +104,22 @@ anntoations: The value of a let binding is the value of the final expression after evaluating the bindings it depends on. -A user can write a sequence of let bindings, we can view -these blocks and pure dataflow -single binding. These blocks are pure dataflow, and can -be evaluated in any order, reordered up to dataflow. +A sequence of let bindings can be viewed as a dataflow graph, +where the bindings are a series of sub-graphs connected +by bound variables. Since these binding sequences are +pure, we can evaluate them in any order up to the program +dataflow. -We support a sequence of bindings followed by a body which -is the continutation after executing the sequence of bindings. +For example the below Relay program is equivalent to the +below NNVM program. -I believe this representation will be easier to manipulate then -the mixed dataflow/control flow comptuation graphs. -Data flow and control flow are strictly seperated in this representation -and we can easily syntactically discriminate. When in ANF there should only be -general control flow between `Assignment` nodes and not within the values bound -in bindings. +.. code-block:: python + let %y_pred = %x * %w + %b; + let %loss = pow(%y - %y_pred, 2); + ret %loss -This representation also makes it easy to apply reverse more since -sequences of assignments where the only control flow is call instructions -are treated by the algorithm uniformly, and each control flow construct -must be handled individualy. +.. code-block:: python + TODO See :py:class:`~tvm.relay.expr.Let` for its definition and documentation. @@ -129,17 +127,20 @@ See :py:class:`~tvm.relay.expr.Let` for its definition and documentation. Control Flow Expression ======================= -Control flow expressions change network topology based on values -computed by previous expressions. +Control flow expressions enable network topology to change based +based on the value of previously executed expressions. Call ~~~~ -Terms with function types in Relay are "callable", i.e they can be invoked like -a function in a typical programming language by supplying a set of arguments. +Expressions with function types in Relay are "callable", i.e they can be invoked using +a function call. All Relay functions are typed with function types, as well as all Relay operators. +For example we can call the previously defined `fact` because it has a function +type: + .. code-block:: python fact(10) @@ -148,8 +149,8 @@ See :py:class:`~tvm.relay.expr.Call` for its definition and documentation. 
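The callee may be any expression of function type, so operators, global
definitions, local variables, and function literals can all appear in call
position. The sketch below is illustrative only and assumes an elementwise
:code:`add` operator has been registered; it reuses the informal syntax of
the examples above.

.. code-block:: python

    add(%x, %y)

    (fun (%t) { %t * %t })(%x)
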
If-Then-Else ~~~~~~~~~~~~ -Relay has a simple if/then/else expression which allows programs to branch -on a single control value which must be of type :code:`bool`, i.e a zero-rank +Relay has a simple if-then-else expression which allows programs to branch +on a single value of type :code:`bool`, i.e a zero-rank tensor of booleans (:code:`Tensor[(), bool]`). .. code-block:: python From 490b42d2a81a964f795e5b9dbe9d7f92a9eb43e6 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Wed, 17 Oct 2018 16:41:18 -0700 Subject: [PATCH 08/28] Address a few more typos --- docs/langref/relay/intro.rst | 6 +++--- docs/langref/relay/type_system.rst | 7 ------- 2 files changed, 3 insertions(+), 10 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index 875054a96ae9..5d078f9e5053 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -5,7 +5,7 @@ Introduction Relay is differentiable programming language with support for closures, control-flow, and recursion. It has an advanced static type system specifically designed for programs written -by machine learning practioniers and researchers. +by machine learning practitioners and researchers. Relay is intended to replace the computation graph based intermediate representations currently employed by deep @@ -15,7 +15,7 @@ for the form of computation originally desired, i.e a directed acyclic graph of primitive functions. This may be a useful way to describe early ML models, but is -limited onceyou want to generalize to dynamic models. +limited once you want to generalize to dynamic models. At the same time computation graphs have been over loaded purpose as both a compile-time and run-time data structure. @@ -87,7 +87,7 @@ beyond the type of identifier. Globals are written with `@`, locals are written with `%`, and operators are written with no sigil like LLVM's intrinsics. The distinction between global and local identifiers makes certain kinds of transformation easier. For example inlining a global definition -requires no analysis, you can write a pass that just directly inlines the defintions. +requires no analysis, you can write a pass that just directly inlines the definitions. It also ensures there are no spooky action at a distance, introducing a new identifier of any type will never introduce an ambiguity to the program. diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index b51fc6203386..ce6ca5c52ee4 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -70,7 +70,6 @@ This restriction importantly means The shape may be any valid Relay shape as described in the below section on shapes. - See :py:class:`~tvm.relay.type.TensorType` for its definition and documentation. Kind @@ -179,9 +178,3 @@ A type, or portion of a type which is not known yet. Only used during type infer .. note:: Known as a "type variable" in the type checking literature. See :py:class:`~tvm.relay.type.IncompleteType` for its definition and documentation. 
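As a purely schematic illustration of incomplete types (the :code:`?0`
notation below is not Relay syntax, only a convenient way to write an
unsolved type variable), inference assigns an incomplete type to an
unannotated parameter and later solves it against a call site:

.. code-block:: python

    # %x is initially assigned the incomplete type ?0.
    def @id(%x) { %x }

    # A call lets inference unify ?0 with the type of %a.
    @id(%a)
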
- - - - - - From 7809a52467923f6335e5604630113af18f872f6b Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 18 Oct 2018 15:45:45 -0700 Subject: [PATCH 09/28] More refactoring --- docs/api/python/relay/ty.rst | 20 +++--- docs/langref/relay/intro.rst | 108 ++++++++++++----------------- docs/langref/relay/type_system.rst | 20 +++--- 3 files changed, 64 insertions(+), 84 deletions(-) diff --git a/docs/api/python/relay/ty.rst b/docs/api/python/relay/ty.rst index 1a8e8af57fde..971f83bd89c1 100644 --- a/docs/api/python/relay/ty.rst +++ b/docs/api/python/relay/ty.rst @@ -1,27 +1,27 @@ -tvm.relay.type +tvm.relay.ty -------------- -.. automodule:: tvm.relay.type +.. automodule:: tvm.relay.ty -.. autoclass:: tvm.relay.type.Type +.. autoclass:: tvm.relay.ty.Type :members: -.. autoclass:: tvm.relay.type.TensorType +.. autoclass:: tvm.relay.ty.TensorType :members: -.. autoclass:: tvm.relay.type.Kind +.. autoclass:: tvm.relay.ty.Kind :members: -.. autoclass:: tvm.relay.type.TypeParam +.. autoclass:: tvm.relay.ty.TypeParam :members: -.. autoclass:: tvm.relay.type.TypeConstraint +.. autoclass:: tvm.relay.ty.TypeConstraint :members: -.. autoclass:: tvm.relay.type.FuncType +.. autoclass:: tvm.relay.ty.FuncType :members: -.. autoclass:: tvm.relay.type.TypeCall +.. autoclass:: tvm.relay.ty.TypeCall :members: -.. autoclass:: tvm.relay.type.IncompleteType +.. autoclass:: tvm.relay.ty.IncompleteType :members: \ No newline at end of file diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index 5d078f9e5053..9a55964d7df9 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -1,55 +1,34 @@ -================== +============ Introduction -================== +============ Relay is differentiable programming language with support for closures, control-flow, and recursion. It has an advanced static type system specifically designed for programs written -by machine learning practitioners and researchers. - -Relay is intended to replace the computation graph based -intermediate representations currently employed by deep +by machine learning practitioners and researchers. Relay is intended to replace +the computation graph based intermediate representations currently employed by deep learning frameworks and compilers. The deep learning community has organically evolved a representation that was useful for the form of computation originally desired, i.e a directed acyclic graph of primitive functions. -This may be a useful way to describe early ML models, but is -limited once you want to generalize to dynamic models. - -At the same time computation graphs have been over loaded -purpose as both a compile-time and run-time data structure. -The conflation of the description of a computation, the +Computation graphs were a useful way to describe ML models with static +topology, but make representing control flow, and abstraction tricky. +Computation graphs have dual purpose as both a compile-time and run-time data +structure. The conflation of the description of a computation, the representation used for optimizing it, and the data structure -used to execute it uncessarily hampers many of the goals -of a framework. - -We believe having a high level, an expressive language designed +used to execute it unnecessarily hampers many goals of machine +learning frameworks. We believe having a high level, expressive language designed for compiler optimizations is essential to the future of an end-to-end deep learning compiler stack. 
-Relay's design was influenced by the authors previous experince -building advanced optimizing compilers for high level languages, -as well as experinces with the current TVM stack, and NNVM. - -Concretely NNVM has played the role of this high level IR. - -for NNVM which address design flaws of the computation graph IRs -employed by common deep learning frameworks. -We address a few important challenges. First we present a new IR with first -class functions and closures, enabling the design of differentiable -arbitrary control flow constructs. Second we contribute a type system for -Relay which makes the important properties of computations involving tensors -explicit. - -The result is a high-level IR for differentiable comptuation that can -be used to abstract over the details of execution including devices, -parallelism, and distribution. - -We believe Relay will enable researchers and industry to design a -new class of DL framework frontends, differentiable programming languages, -deep probablistic programming languages, thus accelerating the pace of -research in applications. +Relay's design is influenced by the authors' experience building advanced optimizing compilers +for high level languages, as well as challenges presented by the current version +TVM stack, and NNVM's IR. We address a few important challenges with Relay's design. +Relay is an IR with closures, control-flow, recursion, and advanced type system supporting, +complex shape relationships, and symbolic dimensions. We can define a series of +automatic-differentiation over the language, with the goal of enabling higher-order +differentiation of programs with control-flow and closures. ================== Language @@ -63,29 +42,30 @@ IR Reference The IR has a global environment which stores the set of definitions, constants, options, attributes, and provides access to features like -the type inferencer, constant evaluator, and more. +type inference, constant evaluation, and more. -## Node +~~~~~~~~~~ +Relay Node +~~~~~~~~~~ -The fundmental unit of the IR is the node, all nodes must have -a type field. +The fundamental unit of the IR is the node, which only contains a Span. .. code-block:: python + class Node: - def checked_type() -> Type: - ... + span: Span ================== Variables ================== -Relay has three variable references local, global, and operators. Our design draws inspiration -from LLVM which differentiates between identifier types. This enables writers of -optimizations to know precisely what an identifier references without needing information -beyond the type of identifier. +Relay has two notions of variables local, and global. +Our design draws inspiration from LLVM which differentiates between identifier types. +This enables writers of optimizations to know precisely what an identifier references without needing +information beyond the kind of identifier. -Globals are written with `@`, locals are written with `%`, and operators are written with -no sigil like LLVM's intrinsics. The distinction between global and local identifiers +Globals are written with `@`, locals are written with `%`, variables written without a +sigil name the corresponding operator. The distinction between global and local identifiers makes certain kinds of transformation easier. For example inlining a global definition requires no analysis, you can write a pass that just directly inlines the definitions. 
It also ensures there are no spooky action at a distance, introducing a new identifier @@ -95,9 +75,9 @@ of any type will never introduce an ambiguity to the program. Global Variable ~~~~~~~~~~~~~~~~~~ -Global identifiers are prefixed by the ::@ sigil. A global identifier always +Global identifiers are prefixed by the `@` sigil. A global identifier always references a globally visibly definition contained in the environment. You -can write a global identifier as ::@global. +can write a global identifier as `@global`. Local Variable ~~~~~~~~~~~~~~~~~ @@ -112,8 +92,8 @@ Global Functions ================ A definition consists of a name, type parameter, parameters, and an optional return -type. A Relay definition is similar to a procedure or function in a typical programming -language, but can also be viewed as a named subgraph. +type. A global function is no different then a procedures or function in a typical programming +language, and generalize the concept of a named subgraph. A definition minimally consists of an identifier :code:`@id`, an empty set of parameters, and a body expression contained by curly braces @@ -122,7 +102,7 @@ parameters, and a body expression contained by curly braces def @id() { body } -A definiton may also contain any number of parameters, for example a +A definition may also contain any number of parameters, for example a simple function which just adds two tensors .. code-block:: python @@ -138,7 +118,7 @@ we can restrict the above definition to only work on certain types %x + %y } -A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.type.Type`. They represent +A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.ty.Type`. They represent the formal parameters of functions and definitions, and are written as :code:`%x : T`. They may only appear in function literals, and definitions, and have no relation @@ -151,7 +131,7 @@ return type is omitted we will infer the return type based on the text of the program. Finally we can directly construct type polymorphic definitions by writing down -a set of type parameters for a definition. To define a polymoprhic identity +a set of type parameters for a definition. To define a polymorphic identity function, the function which just returns its argument as so. :: def @id(%x: Tensor) { @@ -160,24 +140,24 @@ function, the function which just returns its argument as so. Notice we can omit the return type, and it will still be inferred. -*Note: this is not yet implemented.* +.. *Note: this is not yet implemented.* -Finally we allo a definition be prefixed by metadata, which adds +.. Finally we allow a definition be prefixed by metadata, which adds extra properties to the definition. -It is important to be able to annotate metadata that is external to +.. It is important to be able to annotate metadata that is external to the computational behavior of a definition. For example we can use this to add an `inline` or `noinline` attribute which the compiler can consider when performing inlining. -For example we can set the attributes for :code:`@id_real`.:: +.. For example we can set the attributes for :code:`@id_real`.:: - attributes id_real { +.. attributes id_real { inline: true } - def id_real(%x:Real) { ret %x } +.. def id_real(%x:Real) { ret %x } ========= @@ -200,6 +180,6 @@ Programs ~~~~~~~~ Now that we have presented both global functions, and operators we have -everthing in hand to describe a complete Relay program. 
A Relay program consists of a +everything in hand to describe a complete Relay program. A Relay program consists of a registry of operators, one or more functions, as well as the global configuration stored in the environment. \ No newline at end of file diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index ce6ca5c52ee4..a4a346be522b 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -49,7 +49,7 @@ Type ~~~~ The base type for all Relay types. All Relay types are sub-classes of this base type. -See :py:class:`~tvm.relay.type.Type` for its definition and documentation. +See :py:class:`~tvm.relay.ty.Type` for its definition and documentation. Tensor Type ~~~~~~~~~~~ @@ -70,7 +70,7 @@ This restriction importantly means The shape may be any valid Relay shape as described in the below section on shapes. -See :py:class:`~tvm.relay.type.TensorType` for its definition and documentation. +See :py:class:`~tvm.relay.ty.TensorType` for its definition and documentation. Kind ~~~~ @@ -81,7 +81,7 @@ This controls what a type parameter is allowed to be instantiated with. For example one's of kind BaseType can only be `float32`, `int32`, and so on. -See :py:class:`~tvm.relay.type.Kind` for its definition and documentation. +See :py:class:`~tvm.relay.ty.Kind` for its definition and documentation. Type Parameter ~~~~~~~~~~~~~~ @@ -93,7 +93,7 @@ A type parameter represents a type placeholder which will be filled in later on. This allows the user to write functions which are generic over types. -See :py:class:`~tvm.relay.type.TypeParam` for its definition and documentation. +See :py:class:`~tvm.relay.ty.TypeParam` for its definition and documentation. Type Constriant ~~~~~~~~~~~~~~~ @@ -101,7 +101,7 @@ Type Constriant Abstract class representing a type constraint, to be elaborated on in further releases. -See :py:class:`~tvm.relay.type.TypeConstraint` for its definition and documentation. +See :py:class:`~tvm.relay.ty.TypeConstraint` for its definition and documentation. Function Type ~~~~~~~~~~~~~ @@ -115,7 +115,7 @@ being, a sequence of argument types, and a return type. We informally write them as: `forall (type_params), (arg_types) -> ret_type where type_constraints` -See :py:class:`~tvm.relay.type.FuncType` for its definition and documentation. +See :py:class:`~tvm.relay.ty.FuncType` for its definition and documentation. Type Relation ~~~~~~~~~~~~~ @@ -158,17 +158,17 @@ notational convenience for: That is the user may pick the type of the :code:`Lhs`, :code:`Rhs`, and :code:`Out` as long as we can show :code:`Broadcast(Lhs, Rhs, Out)` holds. -See :py:class:`~tvm.relay.type.TypeRelation` for its definition and documentation. +See :py:class:`~tvm.relay.ty.TypeRelation` for its definition and documentation. Type Call ~~~~~~~~~ Apply a type relation to a set of input arguments, at the present momen the type -call node represents the application of a :py:class:`~tvm.relay.type.TypeRelation` +call node represents the application of a :py:class:`~tvm.relay.ty.TypeRelation` to a set of input arguments. The result of type application is the output variable of the type relation. -See :py:class:`~tvm.relay.type.TypeCall` for its definition and documentation. +See :py:class:`~tvm.relay.ty.TypeCall` for its definition and documentation. Incomplete Type ~~~~~~~~~~~~~~~ @@ -177,4 +177,4 @@ A type, or portion of a type which is not known yet. Only used during type infer .. note:: Known as a "type variable" in the type checking literature. 
-See :py:class:`~tvm.relay.type.IncompleteType` for its definition and documentation. +See :py:class:`~tvm.relay.ty.IncompleteType` for its definition and documentation. From 980876a3bf86682fae1268c3c905535062a671bf Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 18 Oct 2018 16:36:32 -0700 Subject: [PATCH 10/28] More work on type system section --- docs/langref/relay/type_system.rst | 53 +++++++++--------------------- 1 file changed, 16 insertions(+), 37 deletions(-) diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index a4a346be522b..d3fc1611759e 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -28,21 +28,21 @@ on the expressivity of the input language. Because Relay is intended as an IR we require *some* type information to provide full inference. We don't believe this to be an issue as many of the IR builder -inferfaces require some type information, or can generate IR based on their own +interfaces require some type information, or can generate IR based on their own higher level inferences. We view this limited shape inference as a simpler form of type inference. Instead of relying on an ad-hoc procedure for recovering type information from a potentially dynamic program, we apply ideas from compiler and IR design. -Below we briefly dicsuss the different kinds of types in Relay. +Below we briefly discuss the different kinds of types in Relay. ===== Types ===== Relay's type system has a "language of types" which allow us to write down the type of -a Relay program. Below we detail the langauge of types and how we assign them to Relay +a Relay program. Below we detail the language of types and how we assign them to Relay programs. Type @@ -54,21 +54,12 @@ See :py:class:`~tvm.relay.ty.Type` for its definition and documentation. Tensor Type ~~~~~~~~~~~ -A concrete TensorType in Relay, see tvm/relay/type.h for more details. +A concrete TensorType in Relay, see `tvm/relay/type.h` for more details. -This is the type assigned to tensor's with a known dype and shape. For -example a tensor of `float32` and `(5, 5)`. - - - -Tensor values in Relay are typed with tensor types. A tensor type is -parametrized by a data type, and shape. The data type must be a base -type as enforced by the kind checking rules described in TODO. - -This restriction importantly means - -The shape may be any valid Relay shape as described in the below -section on shapes. +This is the type assigned to tensor's with a known dtype and shape. For +example a tensor of `float32` and `(5, 5)`. The data type must be a base +type as enforced by the kind checking rules described below. +The shape may be any valid Relay shape as described in the below section on shapes. See :py:class:`~tvm.relay.ty.TensorType` for its definition and documentation. @@ -87,7 +78,7 @@ Type Parameter ~~~~~~~~~~~~~~ A type parameter used for generic types in Relay, -see tvm/relay/type.h for more details. +see `tvm/relay/type.h` for more details. A type parameter represents a type placeholder which will be filled in later on. This allows the user to write @@ -95,7 +86,7 @@ functions which are generic over types. See :py:class:`~tvm.relay.ty.TypeParam` for its definition and documentation. 
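To make the preceding sections concrete, compare a definition fixed to a
single tensor type with one that abstracts over its shape and base type
through type parameters. This is only a sketch: the angle-bracket notation
and the parameter names :code:`s` and :code:`bt` are illustrative, not a
fixed surface syntax.

.. code-block:: python

    def @double(%x : Tensor[(5, 5), float32]) -> Tensor[(5, 5), float32] {
        %x + %x
    }

    def @double_generic<s : Shape, bt : BaseType>(%x : Tensor[s, bt]) -> Tensor[s, bt] {
        %x + %x
    }
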
-Type Constriant +Type Constraint ~~~~~~~~~~~~~~~ Abstract class representing a type constraint, to be elaborated @@ -105,12 +96,11 @@ See :py:class:`~tvm.relay.ty.TypeConstraint` for its definition and documentatio Function Type ~~~~~~~~~~~~~ -A function type in Relay, see tvm/relay/type.h for more details. +A function type in Relay, see `tvm/relay/type.h` for more details. -This is the type assigned to functions in Relay. They consist of -a list of type parameters which enable the definition of generic -functions, a set of type constraints which we omit for the time -being, a sequence of argument types, and a return type. +This is the type assigned to functions in Relay. A function type +is a list of type parameters, a set of type constraints, a sequence of argument +types, and a return type. We informally write them as: `forall (type_params), (arg_types) -> ret_type where type_constraints` @@ -121,9 +111,8 @@ Type Relation ~~~~~~~~~~~~~ A type relation is the most exotic type system feature in Relay. It allows -users to extend type and shape checking/inference with new rules. We use -type relations to type operators with "hard" types such as broadcasting -operators, or special ones like :code:`flatten`. +users to extend type inference with new rules. We use type relations to type operators with +"hard" types such as broadcasting operators, or :code:`flatten`. A type relation :code:`R` is a n-ary input, single output relation over types. To unpack that, it allows us to specify a relationship between @@ -160,16 +149,6 @@ show :code:`Broadcast(Lhs, Rhs, Out)` holds. See :py:class:`~tvm.relay.ty.TypeRelation` for its definition and documentation. -Type Call -~~~~~~~~~ - -Apply a type relation to a set of input arguments, at the present momen the type -call node represents the application of a :py:class:`~tvm.relay.ty.TypeRelation` -to a set of input arguments. The result of type application is the output variable -of the type relation. - -See :py:class:`~tvm.relay.ty.TypeCall` for its definition and documentation. - Incomplete Type ~~~~~~~~~~~~~~~ From d2307e304314e028967b70f7640c4104d9ee971f Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 18 Oct 2018 16:41:02 -0700 Subject: [PATCH 11/28] Fix missing article --- docs/langref/relay/intro.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index 9a55964d7df9..a67870e71d62 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -2,7 +2,7 @@ Introduction ============ -Relay is differentiable programming language with support for +Relay is a differentiable programming language with support for closures, control-flow, and recursion. It has an advanced static type system specifically designed for programs written by machine learning practitioners and researchers. Relay is intended to replace From 91ccd3b94033dcedb9eb3614efac135e45ca16ff Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Thu, 18 Oct 2018 16:49:29 -0700 Subject: [PATCH 12/28] A couple more types --- docs/langref/relay/intro.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index a67870e71d62..ac5f82caabbe 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -12,9 +12,9 @@ has organically evolved a representation that was useful for the form of computation originally desired, i.e a directed acyclic graph of primitive functions. 
-Computation graphs were a useful way to describe ML models with static -topology, but make representing control flow, and abstraction tricky. -Computation graphs have dual purpose as both a compile-time and run-time data +Computation graphs are a good fit for ML models with static topology, but make +representing control flow, and abstraction tricky. Computation graphs have dual +purpose as both a compile-time and run-time data structure. The conflation of the description of a computation, the representation used for optimizing it, and the data structure used to execute it unnecessarily hampers many goals of machine From d2639e3777e6d1d6940780a0e8cd686886f9cfc7 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Fri, 19 Oct 2018 10:08:18 -0700 Subject: [PATCH 13/28] More feedback --- docs/langref/relay/intro.rst | 9 ++++----- docs/langref/relay/type_system.rst | 32 +++++++++++++----------------- 2 files changed, 18 insertions(+), 23 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index ac5f82caabbe..b9169fe47a65 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -68,8 +68,8 @@ Globals are written with `@`, locals are written with `%`, variables written wit sigil name the corresponding operator. The distinction between global and local identifiers makes certain kinds of transformation easier. For example inlining a global definition requires no analysis, you can write a pass that just directly inlines the definitions. -It also ensures there are no spooky action at a distance, introducing a new identifier -of any type will never introduce an ambiguity to the program. +Ensuring there is no spooky action at a distance; introducing a new identifier return +type is omitted we will infer the return type based on the text of the program. Global Variable @@ -165,13 +165,12 @@ Operators ========= A primitive operation that is not defined in the Relay language but provided -externally. Currently we back these operators registrations with the operators +externally. Currently we back these operator's registrations with the operators exposed by TVM's TOPI. An operator requires a user to provide an implementation -of the operator, its type and various attributes required by Relay subsystems. +of the operator, its type, and various required attributes. The input methods for Relay programs do not provide a way to describe operators in Relay, they must be explicitly registered in the global environment via Python or C++. - Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty printing Relay programs. Operators are explicitly contained in the program and are uniquely identifiable by pointer during a run of the Relay compiler. diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index d3fc1611759e..948822a1683f 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -3,39 +3,35 @@ Type System =========== We have briefly introduced types while detailing the the expression language -of Relay, but have fully laid out the type system. - -Although the majority of Relay programs are written without type annotations, -Relay is statically typed. +of Relay, but have not yet described the type system. Relay is +a statically typed, and type inferred language, allowing programs to +be typed with a minimum amount of type information. Static types are useful because they enable efficient layout, memory reuse, and -code generation. 
They aid in debugging program transformations, but can also -give us the expressivity afforded by more dynamic langauges. +code generation. They aid in debugging program transformations, but can also give us the +expressivity afforded by more dynamic languages. We are able to omit these type annotations by a process known as type inference. Type inference is a technique that has its roots in the programming language community, and can be viewed as a method for generalizing shape inference to run over arbitrary user programs containing control flow and recursion. -Static types are useful when performing compiler optimization because they +Static types are useful when performing compiler optimizations because they communicate properties about the data we manipulate, such as runtime shape, -data layout, storage without needing to run the program. +data layout, and storage without needing to run the program. -Most current IRs use "shape inference" to recover Tensor dimensions from the user +Most current IRs use "shape inference" to recover tensor dimensions from the user provided program. Machine learning users have enjoyed shape inference for tensors because it allows them to generate performant code without giving up -on the expressivity of the input language. - -Because Relay is intended as an IR we require *some* type information to provide -full inference. We don't believe this to be an issue as many of the IR builder -interfaces require some type information, or can generate IR based on their own -higher level inferences. +on the expressivity of the input language. Because Relay is intended as an IR we +require *some* type information to provide full inference. We don't believe this to be +an issue as many of the IR builder interfaces require some type information, or can +generate IR based on their own higher level inferences. We view this limited shape inference as a simpler form of type inference. Instead of relying on an ad-hoc procedure for recovering type -information from a potentially dynamic program, we apply ideas from compiler and IR design. - -Below we briefly discuss the different kinds of types in Relay. +information from a potentially dynamic program, we apply ideas from compiler +and IR design. Below we briefly discuss the different kinds of types in Relay. ===== Types From 197ce7b8efa841765daa3c213173285d9a3a9ad5 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Fri, 19 Oct 2018 11:04:24 -0700 Subject: [PATCH 14/28] Address a few more comments --- docs/langref/relay/intro.rst | 18 +++++++++--------- docs/langref/relay/type_system.rst | 10 +++++----- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index b9169fe47a65..cc3dd15616ed 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -5,22 +5,22 @@ Introduction Relay is a differentiable programming language with support for closures, control-flow, and recursion. It has an advanced static type system specifically designed for programs written -by machine learning practitioners and researchers. Relay is intended to replace -the computation graph based intermediate representations currently employed by deep -learning frameworks and compilers. The deep learning community +by machine learning practitioners and researchers. The deep learning community has organically evolved a representation that was useful for the form of computation originally desired, i.e -a directed acyclic graph of primitive functions. 
+a directed acyclic graph of primitive functions. Relay is intended to replace +the computation graph based intermediate representations currently employed by deep +learning frameworks and compilers. Computation graphs are a good fit for ML models with static topology, but make representing control flow, and abstraction tricky. Computation graphs have dual purpose as both a compile-time and run-time data structure. The conflation of the description of a computation, the representation used for optimizing it, and the data structure -used to execute it unnecessarily hampers many goals of machine +used to execute it unnecessarily hampers the goals of machine learning frameworks. We believe having a high level, expressive language designed -for compiler optimizations is essential to the future of an -end-to-end deep learning compiler stack. +for writing compiler optimizations is essential to the future of an end-to-end +deep learning compiler stack. Relay's design is influenced by the authors' experience building advanced optimizing compilers for high level languages, as well as challenges presented by the current version @@ -125,12 +125,12 @@ They may only appear in function literals, and definitions, and have no relation to parameters in the machine learning. When the type information is omitted we will attempt to infer a most general type -for the users. This property is known as generalization, for a definition without +for the users. This property is known as generalization: for a definition without explicit annotations, we will attempt to assign the most general type. When the return type is omitted we will infer the return type based on the text of the program. -Finally we can directly construct type polymorphic definitions by writing down +Finally we can directly construct type-polymorphic definitions by writing down a set of type parameters for a definition. To define a polymorphic identity function, the function which just returns its argument as so. :: diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index 948822a1683f..be7e4ebbfb95 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -29,7 +29,7 @@ an issue as many of the IR builder interfaces require some type information, or generate IR based on their own higher level inferences. We view this limited shape inference as a simpler form of type -inference. Instead of relying on an ad-hoc procedure for recovering type +inference. Instead of relying on an ad hoc procedure for recovering type information from a potentially dynamic program, we apply ideas from compiler and IR design. Below we briefly discuss the different kinds of types in Relay. @@ -37,9 +37,9 @@ and IR design. Below we briefly discuss the different kinds of types in Relay. Types ===== -Relay's type system has a "language of types" which allow us to write down the type of -a Relay program. Below we detail the language of types and how we assign them to Relay -programs. +Relay's type system has a "language of types" which enables one to write down the type of +a Relay program. Below we detail the language of types and how we assign types to Relay +expressions. Type ~~~~ @@ -110,7 +110,7 @@ A type relation is the most exotic type system feature in Relay. It allows users to extend type inference with new rules. We use type relations to type operators with "hard" types such as broadcasting operators, or :code:`flatten`. 
-A type relation :code:`R` is a n-ary input, single output relation over +A type relation :code:`R` is a n-ary input, single-output relation over types. To unpack that, it allows us to specify a relationship between a set of input and output types. From 75629afb95d8c9c9fedf0926782eeaa34d253bfa Mon Sep 17 00:00:00 2001 From: "Steven S. Lyubomirsky" Date: Fri, 19 Oct 2018 18:16:22 -0700 Subject: [PATCH 15/28] Grammatical edits to expressions.rst --- docs/langref/relay/expressions.rst | 55 +++++++++++++++--------------- 1 file changed, 27 insertions(+), 28 deletions(-) diff --git a/docs/langref/relay/expressions.rst b/docs/langref/relay/expressions.rst index 3275be29d163..f9644d30d7ff 100644 --- a/docs/langref/relay/expressions.rst +++ b/docs/langref/relay/expressions.rst @@ -3,17 +3,16 @@ Expressions =========== The Relay IR is a pure, expression-oriented language with distinct dataflow -and control flow language fragments. Although Relay's representation is an abstract syntax -tree, it is possible to view the dataflow fragment as graph for purposes of writing and -expressing transformations. +and control flow language fragments. Although Relay uses abstract syntax trees to represent programs, +the dataflow fragments of a program can be considered as a graph when writing and expressing transformations. -The below sections make an attempt to clearly split the expressions which +The below sections make an attempt to clearly split the expressions that are pure dataflow (equivalent to traditional computation graphs) from -the extended expressions which contain control flow. +the extended expressions that contain control flow. -==================== -Dataflow Expressions -==================== +================= +Dataflow Fragment +================= First we will cover the set of expressions which do not involve control flow; this fragment of the language is semantically equivalent to pure computation graphs @@ -22,7 +21,7 @@ without control flow. Constants ~~~~~~~~~ -Relay programs can contain constant Tensor values. This node represents +Relay programs can contain constant tensor values. This node represents a constant tensor value (see :py:mod:~tvm.relay.Value for more details). The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize TVM operators for constant evaluation. @@ -32,9 +31,9 @@ See :py:class:`~tvm.relay.expr.Constant` for its definition and documentation. Tuple ~~~~~ -We support tuple constructors; the tuple node builds a finite (i.e statically known size) sequence of -heterogeneous data. These tuples match closely to Python's and enable efficient projection of their -members due to their fixed length. +The tuple node builds a finite (that is, of statically known size) sequence of heterogeneous data. +These tuples match Python's closely. Their fixed length allows for efficient projection of their +members. .. code-block:: python @@ -54,8 +53,8 @@ parameters, a return type, and a body. fun (x : Float, y: Float) -> Float { x + y } -Functions are first class in Relay, and can be used in any expression -position. Functions expressions are the same as global functions, but do not +Functions are first-class in Relay and can be used in any expression +position. Function expressions are the same as global functions, but do not have a globally unique name. You can use a function in conjunction with a let binding to define locally recursive functions. @@ -75,10 +74,10 @@ See :py:class:`~tvm.relay.expr.Function` for its definition and documentation. 
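Being first class also means functions can be passed as arguments to other
functions. The sketch below follows the informal syntax of the surrounding
examples; the names :code:`%apply_twice` and :code:`%square` are invented
for this illustration.

.. code-block:: python

    let %apply_twice = fun (%f, %x) { %f(%f(%x)) };
    let %square = fun (%y) { %y * %y };
    %apply_twice(%square, %z)
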
Variables ~~~~~~~~~ -Both global variables, and local variables are valid expressions, one may use them +Both global variables and local variables are valid expressions; one may use them anywhere an expression may appear. -For example the below fragment of code is a valid expression. +For example, the below fragment of code is a valid expression. .. code-block:: python %ret = @global(op_name, %local) @@ -89,10 +88,10 @@ and documentation. Let Binding ~~~~~~~~~~~ -An immutable variable binding, allows the user to bind an -expression to a name. A let binding contains a local variable, -an optional type annotation, a value, and body expression -which may reference the bound identifier. +A let binding is an immutable variable binding, allowing the user +to bind an expression to a name. A let binding contains a local variable, +an optional type annotation, a value, and a body expression +that may reference the bound identifier. We will first introduce a single binding without type annotations: @@ -104,10 +103,10 @@ type annotations: The value of a let binding is the value of the final expression after evaluating the bindings it depends on. -A sequence of let bindings can be viewed as a dataflow graph, +A sequence of let bindings can be considered as a dataflow graph, where the bindings are a series of sub-graphs connected by bound variables. Since these binding sequences are -pure, we can evaluate them in any order up to the program +pure, we can evaluate them in any order according to the program dataflow. For example the below Relay program is equivalent to the @@ -133,12 +132,12 @@ based on the value of previously executed expressions. Call ~~~~ -Expressions with function types in Relay are "callable", i.e they can be invoked using -a function call. +Expressions with function types in Relay are "callable," meaning that they can be +invoked via a function call. -All Relay functions are typed with function types, as well as all Relay operators. +All Relay functions are typed with function types, as are all Relay operators. -For example we can call the previously defined `fact` because it has a function +For example, we can call the previously defined `fact` because it has a function type: .. code-block:: python @@ -149,8 +148,8 @@ See :py:class:`~tvm.relay.expr.Call` for its definition and documentation. If-Then-Else ~~~~~~~~~~~~ -Relay has a simple if-then-else expression which allows programs to branch -on a single value of type :code:`bool`, i.e a zero-rank +Relay has a simple if-then-else expression that allows programs to branch +on a single value of type :code:`bool`, i.e., a zero-rank tensor of booleans (:code:`Tensor[(), bool]`). .. code-block:: python From cfb92ad6233579305a7c1a08a7def99f0d23b87e Mon Sep 17 00:00:00 2001 From: "Steven S. Lyubomirsky" Date: Fri, 19 Oct 2018 18:19:03 -0700 Subject: [PATCH 16/28] Grammatical tweaks to index.rst --- docs/langref/relay/index.rst | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/docs/langref/relay/index.rst b/docs/langref/relay/index.rst index c61cbc0fc97b..1b05697cbb03 100644 --- a/docs/langref/relay/index.rst +++ b/docs/langref/relay/index.rst @@ -2,12 +2,11 @@ Relay Language Reference ======================== This document is a work in progress language reference describing -Relay, TVM's high level intermediate representation. The name is an -allusion to interneurons which are often referred to as intermediate, -or Relay neurons. +Relay, TVM's high-level intermediate representation. 
The name is an +allusion to interneurons, which are often referred to as intermediate +or relay neurons. -We will continually iterate on this document as we evolve the new IR -and update accordingly. +As we evolve the new IR, we will update this document accordingly. .. toctree:: :maxdepth: 2 From c3f76564e14278175b320f5f04b51e1f8a669849 Mon Sep 17 00:00:00 2001 From: "Steven S. Lyubomirsky" Date: Fri, 19 Oct 2018 18:34:51 -0700 Subject: [PATCH 17/28] Stylistic and grammar edits to intro.rst --- docs/langref/relay/intro.rst | 96 ++++++++++++++++++------------------ 1 file changed, 47 insertions(+), 49 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index cc3dd15616ed..a0ecd8a96f06 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -3,32 +3,30 @@ Introduction ============ Relay is a differentiable programming language with support for -closures, control-flow, and recursion. It has an advanced +closures, control flow, and recursion. It has an advanced static type system specifically designed for programs written -by machine learning practitioners and researchers. The deep learning community -has organically evolved a representation that was useful -for the form of computation originally desired, i.e -a directed acyclic graph of primitive functions. Relay is intended to replace -the computation graph based intermediate representations currently employed by deep +by machine learning practitioners and researchers. Relay is intended to replace +the computation graph-based intermediate representations currently employed by deep learning frameworks and compilers. -Computation graphs are a good fit for ML models with static topology, but make -representing control flow, and abstraction tricky. Computation graphs have dual -purpose as both a compile-time and run-time data -structure. The conflation of the description of a computation, the -representation used for optimizing it, and the data structure -used to execute it unnecessarily hampers the goals of machine -learning frameworks. We believe having a high level, expressive language designed -for writing compiler optimizations is essential to the future of an end-to-end +The deep learning community has organically evolved a representation of machine learning +models that was useful for the form of computation originally desired, i.e., a directed +acyclic graph of primitive functions. Computation graphs also serve a dual purpose as both a +compile-time and run-time data structure. While computation graphs are a good fit +for models with a static topology, they make it tricky to incorporate control flow and +abstraction. Additionally, the conflation of the description of a computation, the +representation used for optimizing it, and the data structure used to execute it unnecessarily +hamper the goals of machine learning frameworks. We believe having a high-level, expressive language +designed for writing compiler optimizations is essential to the future of an end-to-end deep learning compiler stack. Relay's design is influenced by the authors' experience building advanced optimizing compilers -for high level languages, as well as challenges presented by the current version +for high-level languages, as well as by challenges presented by the current version of the TVM stack, and NNVM's IR. We address a few important challenges with Relay's design. 
-Relay is an IR with closures, control-flow, recursion, and advanced type system supporting, -complex shape relationships, and symbolic dimensions. We can define a series of -automatic-differentiation over the language, with the goal of enabling higher-order -differentiation of programs with control-flow and closures. +Relay is an IR with closures, control-flow, recursion, and an advanced type system supporting +complex shape relationships and symbolic dimensions. We define a series of +automatic differentiation rules over the language, with the goal of enabling higher-order +differentiation of programs with control flow and closures. ================== Language @@ -40,9 +38,9 @@ Relay is a purely functional, differentiable intermediate representation. IR Reference ================== -The IR has a global environment which stores the set of definitions, -constants, options, attributes, and provides access to features like -type inference, constant evaluation, and more. +The IR has a global environment that stores the set of definitions, +constants, options, and attributes, and provides access to features +including type inferecnce, constant evaluation, and more. ~~~~~~~~~~ Relay Node @@ -59,15 +57,15 @@ The fundamental unit of the IR is the node, which only contains a Span. Variables ================== -Relay has two notions of variables local, and global. -Our design draws inspiration from LLVM which differentiates between identifier types. +Relay has two notions of variables: local and global. +Our design draws inspiration from LLVM, which differentiates between identifier types. This enables writers of optimizations to know precisely what an identifier references without needing information beyond the kind of identifier. -Globals are written with `@`, locals are written with `%`, variables written without a +Globals are written with `@`, locals are written with `%`, and variables written without a sigil name the corresponding operator. The distinction between global and local identifiers -makes certain kinds of transformation easier. For example inlining a global definition -requires no analysis, you can write a pass that just directly inlines the definitions. +makes certain kinds of transformation easier. For example, inlining a global definition +requires no analysis, as you can write a pass that directly inlines the definitions. Ensuring there is no spooky action at a distance; introducing a new identifier return type is omitted we will infer the return type based on the text of the program. @@ -92,25 +90,25 @@ Global Functions ================ A definition consists of a name, type parameter, parameters, and an optional return -type. A global function is no different then a procedures or function in a typical programming -language, and generalize the concept of a named subgraph. +type. A global function is no different from a procedure or function in a typical programming +language and generalizes the concept of a named subgraph. A definition minimally consists of an identifier :code:`@id`, an empty set of -parameters, and a body expression contained by curly braces +parameters, and a body expression contained by curly braces. .. code-block:: python def @id() { body } -A definition may also contain any number of parameters, for example a -simple function which just adds two tensors +A definition may also contain any number of parameters. For example, a +simple function which just adds two tensors: .. 
code-block:: python def @add(%x, %y) { %x + %y } -It is also possible for us to annotate explicit types on definitions, for example -we can restrict the above definition to only work on certain types +It is also possible for us to annotate explicit types on definitions. For example, +we can restrict the above definition to only work on certain types: .. code-block:: python @@ -121,24 +119,24 @@ we can restrict the above definition to only work on certain types A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.ty.Type`. They represent the formal parameters of functions and definitions, and are written as :code:`%x : T`. -They may only appear in function literals, and definitions, and have no relation +They may only appear in function literals and definitions, and have no relation to parameters in the machine learning. -When the type information is omitted we will attempt to infer a most general type +When the type information is omitted, we will attempt to infer a most general type for the users. This property is known as generalization: for a definition without explicit annotations, we will attempt to assign the most general type. When the -return type is omitted we will infer the return type based on the text of the +return type is omitted, we will infer the return type based on the text of the program. -Finally we can directly construct type-polymorphic definitions by writing down -a set of type parameters for a definition. To define a polymorphic identity -function, the function which just returns its argument as so. +Finally, we can directly construct type-polymorphic definitions by writing down +a set of type parameters for a definition. For example, one can definte a +polymorphic identity function for tensors as follows: :: def @id(%x: Tensor) { %x } -Notice we can omit the return type, and it will still be inferred. +Notice we can omit the return type and it will still be inferred. .. *Note: this is not yet implemented.* @@ -146,8 +144,8 @@ Notice we can omit the return type, and it will still be inferred. extra properties to the definition. .. It is important to be able to annotate metadata that is external to -the computational behavior of a definition. For example we can use -this to add an `inline` or `noinline` attribute which the compiler +the computational behavior of a definition. For example, we can use +this to add an `inline` or `noinline` attribute that the compiler can consider when performing inlining. .. For example we can set the attributes for :code:`@id_real`.:: @@ -164,21 +162,21 @@ can consider when performing inlining. Operators ========= -A primitive operation that is not defined in the Relay language but provided +An operator is a primitive operation that is not defined in the Relay language but is provided externally. Currently we back these operator's registrations with the operators exposed by TVM's TOPI. An operator requires a user to provide an implementation of the operator, its type, and various required attributes. The input methods for Relay programs do not provide a way to describe operators in -Relay, they must be explicitly registered in the global environment via Python or C++. -Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty -printing Relay programs. Operators are explicitly contained in the program and are uniquely +Relay; they must be explicitly registered in the global environment via Python or C++. 
+Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty- +printing Relay programs. Operators are explicitly contained in the program and are uniquely identifiable by pointer during a run of the Relay compiler. Programs ~~~~~~~~ -Now that we have presented both global functions, and operators we have +Now that we have presented both global functions and operators, we have everything in hand to describe a complete Relay program. A Relay program consists of a registry of operators, one or more functions, as well as the global configuration -stored in the environment. \ No newline at end of file +stored in the environment. From 1e5a9a5ba9b2077dcf71589deedf4e1cd54b3087 Mon Sep 17 00:00:00 2001 From: "Steven S. Lyubomirsky" Date: Fri, 19 Oct 2018 18:37:34 -0700 Subject: [PATCH 18/28] Smaller tweaks to intro.rst --- docs/langref/relay/intro.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst index a0ecd8a96f06..9f5a9cab4b2c 100644 --- a/docs/langref/relay/intro.rst +++ b/docs/langref/relay/intro.rst @@ -89,7 +89,7 @@ identifier as :code:`%local`. Global Functions ================ -A definition consists of a name, type parameter, parameters, and an optional return +A function definition consists of a name, parameters, type parameters, and an optional return type. A global function is no different from a procedure or function in a typical programming language and generalizes the concept of a named subgraph. @@ -119,10 +119,10 @@ we can restrict the above definition to only work on certain types: A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.ty.Type`. They represent the formal parameters of functions and definitions, and are written as :code:`%x : T`. -They may only appear in function literals and definitions, and have no relation +Parameters may only appear in function literals and definitions and have no relation to parameters in the machine learning. -When the type information is omitted, we will attempt to infer a most general type +When the type information is omitted, we will attempt to infer the most general type for the users. This property is known as generalization: for a definition without explicit annotations, we will attempt to assign the most general type. When the return type is omitted, we will infer the return type based on the text of the From 49d453ab7bb2e58d2742a1285307a88b14b97e3d Mon Sep 17 00:00:00 2001 From: "Steven S. Lyubomirsky" Date: Fri, 19 Oct 2018 18:53:52 -0700 Subject: [PATCH 19/28] Grammar and style changes in type_system.rst --- docs/langref/relay/type_system.rst | 38 ++++++++++++++++-------------- 1 file changed, 20 insertions(+), 18 deletions(-) diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst index be7e4ebbfb95..bb66d866e935 100644 --- a/docs/langref/relay/type_system.rst +++ b/docs/langref/relay/type_system.rst @@ -4,8 +4,8 @@ Type System We have briefly introduced types while detailing the the expression language of Relay, but have not yet described the type system. Relay is -a statically typed, and type inferred language, allowing programs to -be typed with a minimum amount of type information. +a statically typed and type-inferred language, allowing programs to +be typed with a minimal requirement of explicit type information. Static types are useful because they enable efficient layout, memory reuse, and code generation. 
They aid in debugging program transformations, but can also give us the @@ -23,15 +23,15 @@ data layout, and storage without needing to run the program. Most current IRs use "shape inference" to recover tensor dimensions from the user provided program. Machine learning users have enjoyed shape inference for tensors because it allows them to generate performant code without giving up -on the expressivity of the input language. Because Relay is intended as an IR we +on the expressivity of the input language. Because Relay is intended as an IR, we require *some* type information to provide full inference. We don't believe this to be -an issue as many of the IR builder interfaces require some type information, or can -generate IR based on their own higher level inferences. +an issue, as many of the IR builder interfaces require some type information or can +generate IR based on their own higher-level inferences. We view this limited shape inference as a simpler form of type inference. Instead of relying on an ad hoc procedure for recovering type information from a potentially dynamic program, we apply ideas from compiler -and IR design. Below we briefly discuss the different kinds of types in Relay. +and IR design. Below, we briefly discuss the different kinds of types in Relay. ===== Types @@ -43,6 +43,7 @@ expressions. Type ~~~~ + The base type for all Relay types. All Relay types are sub-classes of this base type. See :py:class:`~tvm.relay.ty.Type` for its definition and documentation. @@ -61,7 +62,8 @@ See :py:class:`~tvm.relay.ty.TensorType` for its definition and documentation. Kind ~~~~ -The kind of a type parameter, represents a variable shape, + +The kind of a type parameter, representing a variable shape, base type, type, or dimension. This controls what a type parameter is allowed to be instantiated @@ -76,9 +78,9 @@ Type Parameter A type parameter used for generic types in Relay, see `tvm/relay/type.h` for more details. -A type parameter represents a type placeholder which will +A type parameter represents a type placeholder that will be filled in later on. This allows the user to write -functions which are generic over types. +functions that are generic over types. See :py:class:`~tvm.relay.ty.TypeParam` for its definition and documentation. @@ -86,7 +88,7 @@ Type Constraint ~~~~~~~~~~~~~~~ Abstract class representing a type constraint, to be elaborated -on in further releases. +upon in further releases. See :py:class:`~tvm.relay.ty.TypeConstraint` for its definition and documentation. @@ -94,8 +96,8 @@ Function Type ~~~~~~~~~~~~~ A function type in Relay, see `tvm/relay/type.h` for more details. -This is the type assigned to functions in Relay. A function type -is a list of type parameters, a set of type constraints, a sequence of argument +This is the type assigned to functions in Relay. A function type consists of +a list of type parameters, a set of type constraints, a sequence of argument types, and a return type. We informally write them as: @@ -110,7 +112,7 @@ A type relation is the most exotic type system feature in Relay. It allows users to extend type inference with new rules. We use type relations to type operators with "hard" types such as broadcasting operators, or :code:`flatten`. -A type relation :code:`R` is a n-ary input, single-output relation over +A type relation :code:`R` is an n-ary input, single-output relation over types. To unpack that, it allows us to specify a relationship between a set of input and output types. 
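To make the idea of an input/output relation concrete, the following is an illustrative, self-contained sketch. It is not taken from the patch above and does not use the actual TVM relation-registration API; it only spells out the shape rule that a :code:`Broadcast(Lhs, Rhs, Out)` relation has to enforce: given two input shapes, either a unique output shape exists or the relation cannot hold.

.. code-block:: python

    # Illustrative only: NumPy-style broadcasting between two shapes.
    # The real Broadcast relation is implemented in C++ or Python and
    # registered with Relay's type inference; this sketch just shows the
    # constraint such a relation encodes.
    from itertools import zip_longest

    def broadcast_shapes(lhs, rhs):
        """Return the broadcast of lhs and rhs, or None if no output
        shape satisfies Broadcast(lhs, rhs, out)."""
        out = []
        # Walk both shapes from the trailing dimension backwards,
        # padding the shorter shape with 1s.
        for l, r in zip_longest(reversed(lhs), reversed(rhs), fillvalue=1):
            if l == r or l == 1 or r == 1:
                out.append(max(l, r))
            else:
                return None
        return tuple(reversed(out))

    assert broadcast_shapes((5, 1, 3), (4, 3)) == (5, 4, 3)
    assert broadcast_shapes((2, 3), (4, 3)) is None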
@@ -125,22 +127,22 @@ Or we can define the relation for flatten: Flatten(Tensor(sh, bt), O) :- O = Tensor(sh[0], prod(sh[1:])) -The above examples are written in Prolog-like syntax but currently the relations +The above examples are written in Prolog-like syntax, but currently the relations must be implemented by users in C++ or Python. If we have a relation such as :code:`Broadcast` it becomes possible to type things -such as :code:`elemwise_add`: +like :code:`elemwise_add`: .. code-block:: python elemwise_add : forall (Lhs : Type) (Rhs : Type), (Lhs, Rhs) -> Broadcast(Lhs, Rhs) -You might ask why we write the relation in the return type but we use it as a +You might ask why we write the relation in the return type. We use it as a notational convenience for: .. code-block:: python elemwise_add : forall (Lhs : Type) (Rhs : Type) (Out : Type), Broadcast(Lhs, Rhs, Out) => (Lhs, Rhs) -> Out -That is the user may pick the type of the :code:`Lhs`, :code:`Rhs`, and :code:`Out` as long as we can +That is, the user may pick the type of the :code:`Lhs`, :code:`Rhs`, and :code:`Out` as long as we can show :code:`Broadcast(Lhs, Rhs, Out)` holds. See :py:class:`~tvm.relay.ty.TypeRelation` for its definition and documentation. @@ -148,7 +150,7 @@ See :py:class:`~tvm.relay.ty.TypeRelation` for its definition and documentation. Incomplete Type ~~~~~~~~~~~~~~~ -A type, or portion of a type which is not known yet. Only used during type inference. +A type or portion of a type that is not yet known. Only used during type inference. .. note:: Known as a "type variable" in the type checking literature. From 402750fef187ab9451444fc14ce76b3c83ca9dd2 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Fri, 30 Nov 2018 22:51:06 -0800 Subject: [PATCH 20/28] Remove language reference --- docs/langref/index.rst | 1 - docs/langref/relay/expressions.rst | 162 ------------------------- docs/langref/relay/index.rst | 16 --- docs/langref/relay/intro.rst | 182 ----------------------------- docs/langref/relay/type_system.rst | 157 ------------------------- 5 files changed, 518 deletions(-) delete mode 100644 docs/langref/relay/expressions.rst delete mode 100644 docs/langref/relay/index.rst delete mode 100644 docs/langref/relay/intro.rst delete mode 100644 docs/langref/relay/type_system.rst diff --git a/docs/langref/index.rst b/docs/langref/index.rst index 757835c94cb9..22ca00f7faa5 100644 --- a/docs/langref/index.rst +++ b/docs/langref/index.rst @@ -8,4 +8,3 @@ embedded languages in TVM stack. relay_op hybrid_script - relay/index diff --git a/docs/langref/relay/expressions.rst b/docs/langref/relay/expressions.rst deleted file mode 100644 index f9644d30d7ff..000000000000 --- a/docs/langref/relay/expressions.rst +++ /dev/null @@ -1,162 +0,0 @@ -=========== -Expressions -=========== - -The Relay IR is a pure, expression-oriented language with distinct dataflow -and control flow language fragments. Although Relay uses abstract syntax trees to represent programs, -the dataflow fragments of a program can be considered as a graph when writing and expressing transformations. - -The below sections make an attempt to clearly split the expressions that -are pure dataflow (equivalent to traditional computation graphs) from -the extended expressions that contain control flow. - -================= -Dataflow Fragment -================= - -First we will cover the set of expressions which do not involve control flow; -this fragment of the language is semantically equivalent to pure computation graphs -without control flow. 
- -Constants -~~~~~~~~~ - -Relay programs can contain constant tensor values. This node represents -a constant tensor value (see :py:mod:~tvm.relay.Value for more details). -The constants are represented as :py:class:`~tvm.NDArray`, allowing us to utilize -TVM operators for constant evaluation. - -See :py:class:`~tvm.relay.expr.Constant` for its definition and documentation. - -Tuple -~~~~~ - -The tuple node builds a finite (that is, of statically known size) sequence of heterogeneous data. -These tuples match Python's closely. Their fixed length allows for efficient projection of their -members. - -.. code-block:: python - - (a, b, c) : Tuple - - (a + b + c, d) : Tuple, Tensor> - -See :py:class:`~tvm.relay.expr.Tuple` for its definition and documentation. - -Function -~~~~~~~~ - -A function node represents a function; it contains a sequence of -parameters, a return type, and a body. - -.. code-block:: python - - fun (x : Float, y: Float) -> Float { x + y } - -Functions are first-class in Relay and can be used in any expression -position. Function expressions are the same as global functions, but do not -have a globally unique name. You can use a function in conjunction with a let -binding to define locally recursive functions. - -.. code-block:: python - - let fact = fun (x : Float) -> Float { - if (x == 0) { - 0 - } else { - x * fact(x - 1) - } - }; - fact(10) - -See :py:class:`~tvm.relay.expr.Function` for its definition and documentation. - -Variables -~~~~~~~~~ - -Both global variables and local variables are valid expressions; one may use them -anywhere an expression may appear. - -For example, the below fragment of code is a valid expression. - -.. code-block:: python - %ret = @global(op_name, %local) - -See :py:class:`~tvm.relay.expr.LocalVar` and :py:class:`~tvm.expr.GlobalVar` for its definition -and documentation. - -Let Binding -~~~~~~~~~~~ - -A let binding is an immutable variable binding, allowing the user -to bind an expression to a name. A let binding contains a local variable, -an optional type annotation, a value, and a body expression -that may reference the bound identifier. - -We will first introduce a single binding without -type annotations: - -.. code-block:: python - let %x = %a + %b; - x - -The value of a let binding is the value of the final expression -after evaluating the bindings it depends on. - -A sequence of let bindings can be considered as a dataflow graph, -where the bindings are a series of sub-graphs connected -by bound variables. Since these binding sequences are -pure, we can evaluate them in any order according to the program -dataflow. - -For example the below Relay program is equivalent to the -below NNVM program. - -.. code-block:: python - let %y_pred = %x * %w + %b; - let %loss = pow(%y - %y_pred, 2); - ret %loss - -.. code-block:: python - TODO - -See :py:class:`~tvm.relay.expr.Let` for its definition and documentation. - -======================= -Control Flow Expression -======================= - -Control flow expressions enable network topology to change based -based on the value of previously executed expressions. - -Call -~~~~ - -Expressions with function types in Relay are "callable," meaning that they can be -invoked via a function call. - -All Relay functions are typed with function types, as are all Relay operators. - -For example, we can call the previously defined `fact` because it has a function -type: - -.. code-block:: python - fact(10) - -See :py:class:`~tvm.relay.expr.Call` for its definition and documentation. 
- -If-Then-Else -~~~~~~~~~~~~ - -Relay has a simple if-then-else expression that allows programs to branch -on a single value of type :code:`bool`, i.e., a zero-rank -tensor of booleans (:code:`Tensor[(), bool]`). - -.. code-block:: python - if (sum(equal(t, u))) { - return x: - } else { - return y; - } - -See :py:class:`~tvm.relay.expr.If` for its definition and documentation. diff --git a/docs/langref/relay/index.rst b/docs/langref/relay/index.rst deleted file mode 100644 index 1b05697cbb03..000000000000 --- a/docs/langref/relay/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -Relay Language Reference -======================== - -This document is a work in progress language reference describing -Relay, TVM's high-level intermediate representation. The name is an -allusion to interneurons, which are often referred to as intermediate -or relay neurons. - -As we evolve the new IR, we will update this document accordingly. - -.. toctree:: - :maxdepth: 2 - - intro - expressions - type_system diff --git a/docs/langref/relay/intro.rst b/docs/langref/relay/intro.rst deleted file mode 100644 index 9f5a9cab4b2c..000000000000 --- a/docs/langref/relay/intro.rst +++ /dev/null @@ -1,182 +0,0 @@ -============ -Introduction -============ - -Relay is a differentiable programming language with support for -closures, control flow, and recursion. It has an advanced -static type system specifically designed for programs written -by machine learning practitioners and researchers. Relay is intended to replace -the computation graph-based intermediate representations currently employed by deep -learning frameworks and compilers. - -The deep learning community has organically evolved a representation of machine learning -models that was useful for the form of computation originally desired, i.e., a directed -acyclic graph of primitive functions. Computation graphs also serve a dual purpose as both a -compile-time and run-time data structure. While computation graphs are a good fit -for models with a static topology, they make it tricky to incorporate control flow and -abstraction. Additionally, the conflation of the description of a computation, the -representation used for optimizing it, and the data structure used to execute it unnecessarily -hamper the goals of machine learning frameworks. We believe having a high-level, expressive language -designed for writing compiler optimizations is essential to the future of an end-to-end -deep learning compiler stack. - -Relay's design is influenced by the authors' experience building advanced optimizing compilers -for high-level languages, as well as by challenges presented by the current version of the -TVM stack, and NNVM's IR. We address a few important challenges with Relay's design. -Relay is an IR with closures, control-flow, recursion, and an advanced type system supporting -complex shape relationships and symbolic dimensions. We define a series of -automatic differentiation rules over the language, with the goal of enabling higher-order -differentiation of programs with control flow and closures. - -================== -Language -================== - -Relay is a purely functional, differentiable intermediate representation. - -================== -IR Reference -================== - -The IR has a global environment that stores the set of definitions, -constants, options, and attributes, and provides access to features -including type inferecnce, constant evaluation, and more. 
- -~~~~~~~~~~ -Relay Node -~~~~~~~~~~ - -The fundamental unit of the IR is the node, which only contains a Span. - -.. code-block:: python - - class Node: - span: Span - -================== -Variables -================== - -Relay has two notions of variables: local and global. -Our design draws inspiration from LLVM, which differentiates between identifier types. -This enables writers of optimizations to know precisely what an identifier references without needing -information beyond the kind of identifier. - -Globals are written with `@`, locals are written with `%`, and variables written without a -sigil name the corresponding operator. The distinction between global and local identifiers -makes certain kinds of transformation easier. For example, inlining a global definition -requires no analysis, as you can write a pass that directly inlines the definitions. -Ensuring there is no spooky action at a distance; introducing a new identifier return -type is omitted we will infer the return type based on the text of the program. - - -Global Variable -~~~~~~~~~~~~~~~~~~ - -Global identifiers are prefixed by the `@` sigil. A global identifier always -references a globally visibly definition contained in the environment. You -can write a global identifier as `@global`. - -Local Variable -~~~~~~~~~~~~~~~~~ - -Local identifiers are prefixed by the :code:`%` sigil. A local identifier always -references a parameter, or let bound expression. You can write a local -identifier as :code:`%local`. - - -================ -Global Functions -================ - -A function definition consists of a name, parameters, type parameters, and an optional return -type. A global function is no different from a procedure or function in a typical programming -language and generalizes the concept of a named subgraph. - -A definition minimally consists of an identifier :code:`@id`, an empty set of -parameters, and a body expression contained by curly braces. - -.. code-block:: python - - def @id() { body } - -A definition may also contain any number of parameters. For example, a -simple function which just adds two tensors: - -.. code-block:: python - - def @add(%x, %y) { %x + %y } - -It is also possible for us to annotate explicit types on definitions. For example, -we can restrict the above definition to only work on certain types: - -.. code-block:: python - - def @add(%x: Tensor, %y: Tensor) -> Tensor { - %x + %y - } - -A parameter is just a pairing of a :py:class:`~tvm.relay.expr.LocalVar` and optional :py:class:`~tvm.relay.ty.Type`. They represent -the formal parameters of functions and definitions, and are written as :code:`%x : T`. - -Parameters may only appear in function literals and definitions and have no relation -to parameters in the machine learning. - -When the type information is omitted, we will attempt to infer the most general type -for the users. This property is known as generalization: for a definition without -explicit annotations, we will attempt to assign the most general type. When the -return type is omitted, we will infer the return type based on the text of the -program. - -Finally, we can directly construct type-polymorphic definitions by writing down -a set of type parameters for a definition. For example, one can definte a -polymorphic identity function for tensors as follows: -:: - def @id(%x: Tensor) { - %x - } - -Notice we can omit the return type and it will still be inferred. - -.. *Note: this is not yet implemented.* - -.. 
Finally we allow a definition be prefixed by metadata, which adds -extra properties to the definition. - -.. It is important to be able to annotate metadata that is external to -the computational behavior of a definition. For example, we can use -this to add an `inline` or `noinline` attribute that the compiler -can consider when performing inlining. - -.. For example we can set the attributes for :code:`@id_real`.:: - - -.. attributes id_real { - inline: true - } - -.. def id_real(%x:Real) { ret %x } - - -========= -Operators -========= - -An operator is a primitive operation that is not defined in the Relay language but is provided -externally. Currently we back these operator's registrations with the operators -exposed by TVM's TOPI. An operator requires a user to provide an implementation -of the operator, its type, and various required attributes. - -The input methods for Relay programs do not provide a way to describe operators in -Relay; they must be explicitly registered in the global environment via Python or C++. -Operators are rendered without a sigil (e.g :code:`add`, :code:`subtract`) when pretty- -printing Relay programs. Operators are explicitly contained in the program and are uniquely -identifiable by pointer during a run of the Relay compiler. - -Programs -~~~~~~~~ - -Now that we have presented both global functions and operators, we have -everything in hand to describe a complete Relay program. A Relay program consists of a -registry of operators, one or more functions, as well as the global configuration -stored in the environment. diff --git a/docs/langref/relay/type_system.rst b/docs/langref/relay/type_system.rst deleted file mode 100644 index bb66d866e935..000000000000 --- a/docs/langref/relay/type_system.rst +++ /dev/null @@ -1,157 +0,0 @@ -=========== -Type System -=========== - -We have briefly introduced types while detailing the the expression language -of Relay, but have not yet described the type system. Relay is -a statically typed and type-inferred language, allowing programs to -be typed with a minimal requirement of explicit type information. - -Static types are useful because they enable efficient layout, memory reuse, and -code generation. They aid in debugging program transformations, but can also give us the -expressivity afforded by more dynamic languages. - -We are able to omit these type annotations by a process known as type inference. -Type inference is a technique that has its roots in the programming language -community, and can be viewed as a method for generalizing shape inference to -run over arbitrary user programs containing control flow and recursion. - -Static types are useful when performing compiler optimizations because they -communicate properties about the data we manipulate, such as runtime shape, -data layout, and storage without needing to run the program. - -Most current IRs use "shape inference" to recover tensor dimensions from the user -provided program. Machine learning users have enjoyed shape inference for -tensors because it allows them to generate performant code without giving up -on the expressivity of the input language. Because Relay is intended as an IR, we -require *some* type information to provide full inference. We don't believe this to be -an issue, as many of the IR builder interfaces require some type information or can -generate IR based on their own higher-level inferences. - -We view this limited shape inference as a simpler form of type -inference. 
Instead of relying on an ad hoc procedure for recovering type -information from a potentially dynamic program, we apply ideas from compiler -and IR design. Below, we briefly discuss the different kinds of types in Relay. - -===== -Types -===== - -Relay's type system has a "language of types" which enables one to write down the type of -a Relay program. Below we detail the language of types and how we assign types to Relay -expressions. - -Type -~~~~ - -The base type for all Relay types. All Relay types are sub-classes of this base type. - -See :py:class:`~tvm.relay.ty.Type` for its definition and documentation. - -Tensor Type -~~~~~~~~~~~ - -A concrete TensorType in Relay, see `tvm/relay/type.h` for more details. - -This is the type assigned to tensor's with a known dtype and shape. For -example a tensor of `float32` and `(5, 5)`. The data type must be a base -type as enforced by the kind checking rules described below. -The shape may be any valid Relay shape as described in the below section on shapes. - -See :py:class:`~tvm.relay.ty.TensorType` for its definition and documentation. - -Kind -~~~~ - -The kind of a type parameter, representing a variable shape, -base type, type, or dimension. - -This controls what a type parameter is allowed to be instantiated -with. For example one's of kind BaseType can only be `float32`, -`int32`, and so on. - -See :py:class:`~tvm.relay.ty.Kind` for its definition and documentation. - -Type Parameter -~~~~~~~~~~~~~~ - -A type parameter used for generic types in Relay, -see `tvm/relay/type.h` for more details. - -A type parameter represents a type placeholder that will -be filled in later on. This allows the user to write -functions that are generic over types. - -See :py:class:`~tvm.relay.ty.TypeParam` for its definition and documentation. - -Type Constraint -~~~~~~~~~~~~~~~ - -Abstract class representing a type constraint, to be elaborated -upon in further releases. - -See :py:class:`~tvm.relay.ty.TypeConstraint` for its definition and documentation. - -Function Type -~~~~~~~~~~~~~ -A function type in Relay, see `tvm/relay/type.h` for more details. - -This is the type assigned to functions in Relay. A function type consists of -a list of type parameters, a set of type constraints, a sequence of argument -types, and a return type. - -We informally write them as: -`forall (type_params), (arg_types) -> ret_type where type_constraints` - -See :py:class:`~tvm.relay.ty.FuncType` for its definition and documentation. - -Type Relation -~~~~~~~~~~~~~ - -A type relation is the most exotic type system feature in Relay. It allows -users to extend type inference with new rules. We use type relations to type operators with -"hard" types such as broadcasting operators, or :code:`flatten`. - -A type relation :code:`R` is an n-ary input, single-output relation over -types. To unpack that, it allows us to specify a relationship between -a set of input and output types. - -For example we can define the identity relation to be: - -.. code-block:: prolog - Identity(I, I) :- true - -Or we can define the relation for flatten: - -.. code-block:: prolog - Flatten(Tensor(sh, bt), O) :- - O = Tensor(sh[0], prod(sh[1:])) - -The above examples are written in Prolog-like syntax, but currently the relations -must be implemented by users in C++ or Python. - -If we have a relation such as :code:`Broadcast` it becomes possible to type things -like :code:`elemwise_add`: - -.. 
code-block:: python - elemwise_add : forall (Lhs : Type) (Rhs : Type), (Lhs, Rhs) -> Broadcast(Lhs, Rhs) - -You might ask why we write the relation in the return type. We use it as a -notational convenience for: - -.. code-block:: python - elemwise_add : forall (Lhs : Type) (Rhs : Type) (Out : Type), Broadcast(Lhs, Rhs, Out) => (Lhs, Rhs) -> Out - -That is, the user may pick the type of the :code:`Lhs`, :code:`Rhs`, and :code:`Out` as long as we can -show :code:`Broadcast(Lhs, Rhs, Out)` holds. - -See :py:class:`~tvm.relay.ty.TypeRelation` for its definition and documentation. - -Incomplete Type -~~~~~~~~~~~~~~~ - -A type or portion of a type that is not yet known. Only used during type inference. - -.. note:: Known as a "type variable" in the type checking literature. - -See :py:class:`~tvm.relay.ty.IncompleteType` for its definition and documentation. From a35f79725064c20d3df5020e715b1efe78862d31 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Fri, 30 Nov 2018 22:54:26 -0800 Subject: [PATCH 21/28] Fix base --- docs/api/python/relay/base.rst | 11 +++++++++-- python/tvm/relay/base.py | 6 +++--- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/docs/api/python/relay/base.rst b/docs/api/python/relay/base.rst index 869a402d7f8e..72315dca0193 100644 --- a/docs/api/python/relay/base.rst +++ b/docs/api/python/relay/base.rst @@ -2,8 +2,15 @@ tvm.relay.base -------------- .. automodule:: tvm.relay.base -.. autoclass:: tvm.relay.base.NodeBase +.. autofunction:: tvm.relay.base.register_relay_node + +.. autofunction:: tvm.relay.base.register_relay_attr_node + +.. autoclass:: tvm.relay.base.RelayNode :members: .. autoclass:: tvm.relay.base.Span - :members: \ No newline at end of file + :members: + +.. autoclass:: tvm.relay.base.Id + :members: diff --git a/python/tvm/relay/base.py b/python/tvm/relay/base.py index f1105fe4f0d9..4ba521cfa6d4 100644 --- a/python/tvm/relay/base.py +++ b/python/tvm/relay/base.py @@ -36,7 +36,7 @@ def register_relay_attr_node(type_key=None): class RelayNode(NodeBase): - """Base class of all relay node.""" + """Base class of all Relay nodes.""" def astext(self, show_meta_data=True, annotate=None): """Get the text format of the expression. @@ -52,8 +52,8 @@ def astext(self, show_meta_data=True, annotate=None): Note ---- - meta data section is necessary to fully parse the text format. - However, it can contain dumps that are big(constat weights), + The metadata section is necessary to fully parse the text format. + However, it can contain dumps that are big (e.g. constant weights), so it can be helpful to skip printing the meta data section.
Returns From 9569d319c501e5aaff4d98c00f7ba296bd8b4ab2 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Fri, 30 Nov 2018 23:46:05 -0800 Subject: [PATCH 22/28] Add files for all the Relay models --- docs/api/python/relay/backend.rst | 5 +++++ docs/api/python/relay/build_module.rst | 21 +++++++++++++++++++++ docs/api/python/relay/env.rst | 6 ------ docs/api/python/relay/frontend.rst | 6 ++++++ docs/api/python/relay/image.rst | 6 ++++++ docs/api/python/relay/index.rst | 13 +++++++++---- docs/api/python/relay/ir_builder.rst | 6 ------ docs/api/python/relay/module.rst | 6 ++++++ docs/api/python/relay/nn.rst | 3 +++ docs/api/python/relay/scope_builder.rst | 6 ++++++ docs/api/python/relay/vision.rst | 6 ++++++ python/tvm/relay/base.py | 14 +++++++++----- python/tvm/relay/build_module.py | 12 ++++++------ python/tvm/relay/image.py | 2 +- 14 files changed, 84 insertions(+), 28 deletions(-) create mode 100644 docs/api/python/relay/backend.rst create mode 100644 docs/api/python/relay/build_module.rst delete mode 100644 docs/api/python/relay/env.rst create mode 100644 docs/api/python/relay/frontend.rst create mode 100644 docs/api/python/relay/image.rst delete mode 100644 docs/api/python/relay/ir_builder.rst create mode 100644 docs/api/python/relay/module.rst create mode 100644 docs/api/python/relay/nn.rst create mode 100644 docs/api/python/relay/scope_builder.rst create mode 100644 docs/api/python/relay/vision.rst diff --git a/docs/api/python/relay/backend.rst b/docs/api/python/relay/backend.rst new file mode 100644 index 000000000000..04fd3263cbec --- /dev/null +++ b/docs/api/python/relay/backend.rst @@ -0,0 +1,5 @@ +tvm.relay.backend +-------------- +.. automodule:: tvm.relay.backend + + diff --git a/docs/api/python/relay/build_module.rst b/docs/api/python/relay/build_module.rst new file mode 100644 index 000000000000..eea18c6c6554 --- /dev/null +++ b/docs/api/python/relay/build_module.rst @@ -0,0 +1,21 @@ +tvm.relay.build_module +-------------- +.. automodule:: tvm.relay.build_module + +.. autofunction:: tvm.relay.build_module.build + +.. autofunction:: tvm.relay.build_module.optimize + +.. autofunction:: tvm.relay.build_module.create_executor + +.. autoclass:: tvm.relay.build_module.BuildConfig + :members: + +.. autofunction:: tvm.relay.build_module.build_config + :members: + +.. autoclass:: tvm.relay.build_module.GraphExecutor + :members: + + + diff --git a/docs/api/python/relay/env.rst b/docs/api/python/relay/env.rst deleted file mode 100644 index e54485700058..000000000000 --- a/docs/api/python/relay/env.rst +++ /dev/null @@ -1,6 +0,0 @@ -tvm.relay.env -------------- -.. automodule:: tvm.relay.env - -.. autoclass:: tvm.relay.env.Environment - :members: \ No newline at end of file diff --git a/docs/api/python/relay/frontend.rst b/docs/api/python/relay/frontend.rst new file mode 100644 index 000000000000..5364e63cb108 --- /dev/null +++ b/docs/api/python/relay/frontend.rst @@ -0,0 +1,6 @@ + +tvm.relay.frontend +-------------- +.. automodule:: tvm.relay.backend + + diff --git a/docs/api/python/relay/image.rst b/docs/api/python/relay/image.rst new file mode 100644 index 000000000000..99f2e2e0492d --- /dev/null +++ b/docs/api/python/relay/image.rst @@ -0,0 +1,6 @@ + +tvm.relay.image +-------------- +.. automodule:: tvm.relay.image + + diff --git a/docs/api/python/relay/index.rst b/docs/api/python/relay/index.rst index 7e9c3cb50f38..da3d3a912dd0 100644 --- a/docs/api/python/relay/index.rst +++ b/docs/api/python/relay/index.rst @@ -10,11 +10,16 @@ compiler stack. .. 
toctree:: :maxdepth: 2 + backend base - env + build_module expr - ir_builder + frontend + image ir_pass + module + nn op - to_tvm - type + scope_builder + ty + vision diff --git a/docs/api/python/relay/ir_builder.rst b/docs/api/python/relay/ir_builder.rst deleted file mode 100644 index 0e1c3d87a3e5..000000000000 --- a/docs/api/python/relay/ir_builder.rst +++ /dev/null @@ -1,6 +0,0 @@ -tvm.relay.ir_builder --------------------- -.. automodule:: tvm.relay.ir_builder - -.. autoclass:: tvm.relay.ir_builder.IRBuilder - :members: \ No newline at end of file diff --git a/docs/api/python/relay/module.rst b/docs/api/python/relay/module.rst new file mode 100644 index 000000000000..c150648bf7dd --- /dev/null +++ b/docs/api/python/relay/module.rst @@ -0,0 +1,6 @@ +tvm.relay.module +------------- +.. automodule:: tvm.relay.module + +.. autoclass:: tvm.relay.module.Module + :members: diff --git a/docs/api/python/relay/nn.rst b/docs/api/python/relay/nn.rst new file mode 100644 index 000000000000..5e3b010abe78 --- /dev/null +++ b/docs/api/python/relay/nn.rst @@ -0,0 +1,3 @@ +tvm.relay.nn +------------ +.. automodule:: tvm.relay.nn diff --git a/docs/api/python/relay/scope_builder.rst b/docs/api/python/relay/scope_builder.rst new file mode 100644 index 000000000000..3e28a8a37e0c --- /dev/null +++ b/docs/api/python/relay/scope_builder.rst @@ -0,0 +1,6 @@ +tvm.relay.scope_builder +-------------------- +.. automodule:: tvm.relay.scope_builder + +.. autoclass:: tvm.relay.scope_builder.ScopeBuilder + :members: diff --git a/docs/api/python/relay/vision.rst b/docs/api/python/relay/vision.rst new file mode 100644 index 000000000000..899d9f31b19b --- /dev/null +++ b/docs/api/python/relay/vision.rst @@ -0,0 +1,6 @@ + +tvm.relay.vision +-------------- +.. automodule:: tvm.relay.vision + + diff --git a/python/tvm/relay/base.py b/python/tvm/relay/base.py index 4ba521cfa6d4..c50013b199ac 100644 --- a/python/tvm/relay/base.py +++ b/python/tvm/relay/base.py @@ -8,12 +8,12 @@ NodeBase = NodeBase def register_relay_node(type_key=None): - """register relay node type + """Register a Relay node type. Parameters ---------- type_key : str or cls - The type key of the node + The type key of the node. """ if not isinstance(type_key, str): return _register_tvm_node( @@ -22,12 +22,12 @@ def register_relay_node(type_key=None): def register_relay_attr_node(type_key=None): - """register relay attribute node + """Register a Relay attribute node. Parameters ---------- type_key : str or cls - The type key of the node + The type key of the node. """ if not isinstance(type_key, str): return _register_tvm_node( @@ -66,12 +66,16 @@ def astext(self, show_meta_data=True, annotate=None): @register_relay_node class Span(RelayNode): + """Specifies a location in a source program.""" + def __init__(self, source, lineno, col_offset): self.__init_handle_by_constructor__(_make.Span, source, lineno, col_offset) @register_relay_node class Id(NodeBase): - """Unique identifier(name) for Var across type checking.""" + """Unique identifier(name) used in Var. + Guaranteed to be stable across all passes. 
+ """ def __init__(self): raise RuntimeError("Cannot directly construct Id") diff --git a/python/tvm/relay/build_module.py b/python/tvm/relay/build_module.py index 2a2cd9f82ecb..7af22431aa81 100644 --- a/python/tvm/relay/build_module.py +++ b/python/tvm/relay/build_module.py @@ -240,13 +240,13 @@ class GraphExecutor(_interpreter.Executor): Parameters ---------- - mod : tvm.relay.Module + mod : :py:class:`~tvm.relay.module.Module` The module to support the execution. - ctx : tvm.TVMContext + ctx : :py:class:`TVMContext` The runtime context to run the code on. - target : tvm.Target + target : :py:class:`Target` The target option to build the function. """ def __init__(self, mod, ctx, target): @@ -282,13 +282,13 @@ def create_executor(kind="debug", kind : str The type of executor - mod : tvm.relay.Module + mod : :py:class:`~tvm.relay.module.Module` The Relay module containing collection of functions - ctx : tvm.TVMContext + ctx : :py:class:`tvm.TVMContext` The context to execute the code. - target : tvm.Target + target : :py:class:`tvm.Target` The corresponding context """ if ctx is not None: diff --git a/python/tvm/relay/image.py b/python/tvm/relay/image.py index 43cee89b3483..90bb87d71c2e 100644 --- a/python/tvm/relay/image.py +++ b/python/tvm/relay/image.py @@ -1,4 +1,4 @@ # pylint: disable=wildcard-import, unused-import, unused-wildcard-import -"""Image nets related operators.""" +"""Image network related operators.""" # Re-export in a specific file name so that autodoc can pick it up from .op.image import * From fbd8a47d5c86d8e7d25defe504bab144f05ba0e7 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:04:35 -0800 Subject: [PATCH 23/28] Expose operators and fix some cross-refs --- docs/api/python/relay/expr.rst | 30 +++++++++++++++++++++++------- docs/api/python/relay/frontend.rst | 4 +++- docs/api/python/relay/image.rst | 4 +++- docs/api/python/relay/nn.rst | 4 ++++ docs/api/python/relay/op.rst | 3 ++- docs/api/python/relay/ty.rst | 12 ++++++++---- docs/api/python/relay/vision.rst | 5 +++++ python/tvm/relay/scope_builder.py | 8 ++++---- 8 files changed, 52 insertions(+), 18 deletions(-) diff --git a/docs/api/python/relay/expr.rst b/docs/api/python/relay/expr.rst index 100282f4b344..8a099d74962c 100644 --- a/docs/api/python/relay/expr.rst +++ b/docs/api/python/relay/expr.rst @@ -2,8 +2,11 @@ tvm.relay.expr -------------- .. automodule:: tvm.relay.expr -.. autoclass:: tvm.relay.expr.ExprBuilder - :members: +.. autofunction:: tvm.relay.expr.var + +.. autofunction:: tvm.relay.expr.const + +.. autofunction:: tvm.relay.expr.bind .. autoclass:: tvm.relay.expr.Expr :members: @@ -14,15 +17,12 @@ tvm.relay.expr .. autoclass:: tvm.relay.expr.Tuple :members: -.. autoclass:: tvm.relay.expr.LocalVar +.. autoclass:: tvm.relay.expr.Var :members: .. autoclass:: tvm.relay.expr.GlobalVar :members: -.. autoclass:: tvm.relay.expr.Param - :members: - .. autoclass:: tvm.relay.expr.Function :members: @@ -33,4 +33,20 @@ tvm.relay.expr :members: .. autoclass:: tvm.relay.expr.If - :members: \ No newline at end of file + :members: + +.. autoclass:: tvm.relay.expr.TupleGetItem + :members: + +.. autoclass:: tvm.relay.expr.TempExpr + :members: + +.. autoclass:: tvm.relay.expr.ExprFunctor + :members: + +.. autoclass:: tvm.relay.expr.ExprMutator + :members: + +.. 
autoclass:: tvm.relay.expr.TupleWrapper + :members + diff --git a/docs/api/python/relay/frontend.rst b/docs/api/python/relay/frontend.rst index 5364e63cb108..cfa19e80a17f 100644 --- a/docs/api/python/relay/frontend.rst +++ b/docs/api/python/relay/frontend.rst @@ -1,6 +1,8 @@ tvm.relay.frontend -------------- -.. automodule:: tvm.relay.backend +.. automodule:: tvm.relay.frontend + +.. autofunc:: tvm.relay.frontend.from_mxnet diff --git a/docs/api/python/relay/image.rst b/docs/api/python/relay/image.rst index 99f2e2e0492d..f3ec21c4a2a5 100644 --- a/docs/api/python/relay/image.rst +++ b/docs/api/python/relay/image.rst @@ -2,5 +2,7 @@ tvm.relay.image -------------- .. automodule:: tvm.relay.image + :members: - +.. automodule:: tvm.relay.op.image.image + :members: diff --git a/docs/api/python/relay/nn.rst b/docs/api/python/relay/nn.rst index 5e3b010abe78..8e3f47f7bead 100644 --- a/docs/api/python/relay/nn.rst +++ b/docs/api/python/relay/nn.rst @@ -1,3 +1,7 @@ tvm.relay.nn ------------ .. automodule:: tvm.relay.nn + :members: + +.. automodule:: tvm.relay.op.nn.nn + :members: diff --git a/docs/api/python/relay/op.rst b/docs/api/python/relay/op.rst index 8db3c3e75aba..42bbd7522c42 100644 --- a/docs/api/python/relay/op.rst +++ b/docs/api/python/relay/op.rst @@ -1,3 +1,4 @@ tvm.relay.op ------------ -.. automodule:: tvm.relay.op \ No newline at end of file +.. automodule:: tvm.relay.op + :members: diff --git a/docs/api/python/relay/ty.rst b/docs/api/python/relay/ty.rst index 971f83bd89c1..2b1d8a402238 100644 --- a/docs/api/python/relay/ty.rst +++ b/docs/api/python/relay/ty.rst @@ -1,6 +1,7 @@ tvm.relay.ty -------------- .. automodule:: tvm.relay.ty + :members: .. autoclass:: tvm.relay.ty.Type :members: @@ -11,17 +12,20 @@ tvm.relay.ty .. autoclass:: tvm.relay.ty.Kind :members: -.. autoclass:: tvm.relay.ty.TypeParam +.. autoclass:: tvm.relay.ty.TypeVar :members: .. autoclass:: tvm.relay.ty.TypeConstraint :members: -.. autoclass:: tvm.relay.ty.FuncType +.. autoclass:: tvm.relay.ty.TupleType :members: -.. autoclass:: tvm.relay.ty.TypeCall +.. autoclass:: tvm.relay.ty.FuncType :members: .. autoclass:: tvm.relay.ty.IncompleteType - :members: \ No newline at end of file + :members: + +.. autoclass:: tvm.relay.ty.TypeRelation + :members: diff --git a/docs/api/python/relay/vision.rst b/docs/api/python/relay/vision.rst index 899d9f31b19b..351acdeb01a0 100644 --- a/docs/api/python/relay/vision.rst +++ b/docs/api/python/relay/vision.rst @@ -2,5 +2,10 @@ tvm.relay.vision -------------- .. automodule:: tvm.relay.vision + :members: +.. automodule:: tvm.relay.op.vision.multibox + :members: +.. automodule:: tvm.relay.op.vision.nms + :members: diff --git a/python/tvm/relay/scope_builder.py b/python/tvm/relay/scope_builder.py index 641566946f58..074a4aa66c81 100644 --- a/python/tvm/relay/scope_builder.py +++ b/python/tvm/relay/scope_builder.py @@ -61,7 +61,7 @@ class ScopeBuilder(object): Examples -------- - ..code-block: python + .. code-block: python sb = relay.ScopeBuilder() cond = relay.var("cond", 'bool') @@ -115,7 +115,7 @@ def if_scope(self, cond): Parameters ---------- - cond: tvm.relay.Expr + cond: tvm.relay.expr.Expr The condition Returns @@ -165,7 +165,7 @@ def ret(self, value): Parameters ---------- - value: tvm.relay.Expr + value: tvm.relay.expr.Expr The return value. """ if self._ret_values[-1] is not None: @@ -177,7 +177,7 @@ def get(self): Returns ------- - value: tvm.relay.Expr + value: tvm.relay.expr.Expr The final result of the expression. 
""" if len(self._bindings) != 1: From 9622dc99a958ec7114e4d556a8a0929455f794e0 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:07:03 -0800 Subject: [PATCH 24/28] Add backend and frontend docs --- docs/api/python/relay/backend.rst | 7 +++++++ docs/api/python/relay/frontend.rst | 2 +- 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/docs/api/python/relay/backend.rst b/docs/api/python/relay/backend.rst index 04fd3263cbec..7b8c5f9bfcb9 100644 --- a/docs/api/python/relay/backend.rst +++ b/docs/api/python/relay/backend.rst @@ -2,4 +2,11 @@ tvm.relay.backend -------------- .. automodule:: tvm.relay.backend +.. automodule::tvm.relay.backend.interpreter + :members: +.. automodule::tvm.relay.backend.compile_engine + :members: + +.. automodule::tvm.relay.backend.graph_runtime_codegen + :members: diff --git a/docs/api/python/relay/frontend.rst b/docs/api/python/relay/frontend.rst index cfa19e80a17f..b30dfa175833 100644 --- a/docs/api/python/relay/frontend.rst +++ b/docs/api/python/relay/frontend.rst @@ -3,6 +3,6 @@ tvm.relay.frontend -------------- .. automodule:: tvm.relay.frontend -.. autofunc:: tvm.relay.frontend.from_mxnet +.. autofunction:: tvm.relay.frontend.from_mxnet From a0ce6aea45c0d7c01ab5635f803e98cf0489fe38 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:10:13 -0800 Subject: [PATCH 25/28] Ensure headings are right length --- docs/api/python/relay/backend.rst | 9 +++++---- docs/api/python/relay/build_module.rst | 3 ++- docs/api/python/relay/expr.rst | 1 + docs/api/python/relay/frontend.rst | 3 ++- docs/api/python/relay/image.rst | 3 ++- docs/api/python/relay/module.rst | 3 ++- docs/api/python/relay/scope_builder.rst | 3 ++- docs/api/python/relay/ty.rst | 3 ++- docs/api/python/relay/vision.rst | 3 ++- python/tvm/relay/frontend/__init__.py | 8 +++++++- 10 files changed, 27 insertions(+), 12 deletions(-) diff --git a/docs/api/python/relay/backend.rst b/docs/api/python/relay/backend.rst index 7b8c5f9bfcb9..a6085c3232ef 100644 --- a/docs/api/python/relay/backend.rst +++ b/docs/api/python/relay/backend.rst @@ -1,12 +1,13 @@ tvm.relay.backend --------------- +----------------- + .. automodule:: tvm.relay.backend -.. automodule::tvm.relay.backend.interpreter +.. automodule:: tvm.relay.backend.interpreter :members: -.. automodule::tvm.relay.backend.compile_engine +.. automodule:: tvm.relay.backend.compile_engine :members: -.. automodule::tvm.relay.backend.graph_runtime_codegen +.. automodule:: tvm.relay.backend.graph_runtime_codegen :members: diff --git a/docs/api/python/relay/build_module.rst b/docs/api/python/relay/build_module.rst index eea18c6c6554..15215d68bc33 100644 --- a/docs/api/python/relay/build_module.rst +++ b/docs/api/python/relay/build_module.rst @@ -1,5 +1,6 @@ tvm.relay.build_module --------------- +---------------------- + .. automodule:: tvm.relay.build_module .. autofunction:: tvm.relay.build_module.build diff --git a/docs/api/python/relay/expr.rst b/docs/api/python/relay/expr.rst index 8a099d74962c..540d6bfbab65 100644 --- a/docs/api/python/relay/expr.rst +++ b/docs/api/python/relay/expr.rst @@ -1,5 +1,6 @@ tvm.relay.expr -------------- + .. automodule:: tvm.relay.expr .. autofunction:: tvm.relay.expr.var diff --git a/docs/api/python/relay/frontend.rst b/docs/api/python/relay/frontend.rst index b30dfa175833..2ed10b9c27d8 100644 --- a/docs/api/python/relay/frontend.rst +++ b/docs/api/python/relay/frontend.rst @@ -1,6 +1,7 @@ tvm.relay.frontend --------------- +------------------ + .. automodule:: tvm.relay.frontend .. 
autofunction:: tvm.relay.frontend.from_mxnet diff --git a/docs/api/python/relay/image.rst b/docs/api/python/relay/image.rst index f3ec21c4a2a5..223213eca8e3 100644 --- a/docs/api/python/relay/image.rst +++ b/docs/api/python/relay/image.rst @@ -1,6 +1,7 @@ tvm.relay.image --------------- +--------------- + .. automodule:: tvm.relay.image :members: diff --git a/docs/api/python/relay/module.rst b/docs/api/python/relay/module.rst index c150648bf7dd..ec9642b484ba 100644 --- a/docs/api/python/relay/module.rst +++ b/docs/api/python/relay/module.rst @@ -1,5 +1,6 @@ tvm.relay.module -------------- +---------------- + .. automodule:: tvm.relay.module .. autoclass:: tvm.relay.module.Module diff --git a/docs/api/python/relay/scope_builder.rst b/docs/api/python/relay/scope_builder.rst index 3e28a8a37e0c..19fca89bf2d2 100644 --- a/docs/api/python/relay/scope_builder.rst +++ b/docs/api/python/relay/scope_builder.rst @@ -1,5 +1,6 @@ tvm.relay.scope_builder --------------------- +----------------------- + .. automodule:: tvm.relay.scope_builder .. autoclass:: tvm.relay.scope_builder.ScopeBuilder diff --git a/docs/api/python/relay/ty.rst b/docs/api/python/relay/ty.rst index 2b1d8a402238..edf15275db03 100644 --- a/docs/api/python/relay/ty.rst +++ b/docs/api/python/relay/ty.rst @@ -1,5 +1,6 @@ tvm.relay.ty --------------- +------------ + .. automodule:: tvm.relay.ty :members: diff --git a/docs/api/python/relay/vision.rst b/docs/api/python/relay/vision.rst index 351acdeb01a0..7751dd688b15 100644 --- a/docs/api/python/relay/vision.rst +++ b/docs/api/python/relay/vision.rst @@ -1,6 +1,7 @@ tvm.relay.vision --------------- +---------------- + .. automodule:: tvm.relay.vision :members: diff --git a/python/tvm/relay/frontend/__init__.py b/python/tvm/relay/frontend/__init__.py index 28766b9ae3be..2d01174a0d96 100644 --- a/python/tvm/relay/frontend/__init__.py +++ b/python/tvm/relay/frontend/__init__.py @@ -1,4 +1,10 @@ -"""Relay frontends.""" +""" +Frontends for constructing Relay programs. + +Contains the model importers currently defined +for Relay. +""" + from __future__ import absolute_import from .mxnet import from_mxnet From f1c896ce66956a4a9c24e359052335d36595ad75 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:16:12 -0800 Subject: [PATCH 26/28] Export everything op.rst aswell --- docs/api/python/relay/op.rst | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/docs/api/python/relay/op.rst b/docs/api/python/relay/op.rst index 42bbd7522c42..7413a818f73f 100644 --- a/docs/api/python/relay/op.rst +++ b/docs/api/python/relay/op.rst @@ -2,3 +2,24 @@ tvm.relay.op ------------ .. automodule:: tvm.relay.op :members: + +.. automodule:: tvm.relay.op.op + :members: + +.. automodule:: tvm.relay.op.reduce + :members: + +.. automodule:: tvm.relay.op.tensor + :members: + +.. automodule:: tvm.relay.op.transform + :members: + +.. automodule:: tvm.relay.op.nn.nn + :members: + +.. automodule:: tvm.relay.op.vision.multibox + :members: + +.. 
automodule:: tvm.relay.op.vision.nms + :members: From 45825f9c2b8b1489c832bf93034386034c54f945 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:19:03 -0800 Subject: [PATCH 27/28] Tweak backend docs --- docs/api/python/relay/backend.rst | 3 +++ python/tvm/relay/backend/interpreter.py | 6 ++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/docs/api/python/relay/backend.rst b/docs/api/python/relay/backend.rst index a6085c3232ef..5cbc250b55ba 100644 --- a/docs/api/python/relay/backend.rst +++ b/docs/api/python/relay/backend.rst @@ -3,6 +3,9 @@ tvm.relay.backend .. automodule:: tvm.relay.backend +Interpreter +----------- + .. automodule:: tvm.relay.backend.interpreter :members: diff --git a/python/tvm/relay/backend/interpreter.py b/python/tvm/relay/backend/interpreter.py index 5c7401c8c146..ff6cf6aa1d5c 100644 --- a/python/tvm/relay/backend/interpreter.py +++ b/python/tvm/relay/backend/interpreter.py @@ -1,5 +1,5 @@ #pylint: disable=no-else-return -"""An interface to the Realy interpreter.""" +"""The Python interface to the Relay reference interpreter.""" from __future__ import absolute_import import numpy as np @@ -23,6 +23,7 @@ def from_scalar(value, dtype=None): @register_relay_node class TupleValue(Value): + """A tuple value produced by the interpreter.""" def __init__(self, *fields): self.__init_handle_by_constructor__( _make.TupleValue, fields) @@ -33,12 +34,13 @@ def __getitem__(self, field_no): @register_relay_node class Closure(Value): + """A closure produced by the interpreter.""" pass @register_relay_node class TensorValue(Value): - """A Tensor value produced by the evaluator.""" + """A Tensor value produced by the interpreter.""" def __init__(self, data): """Allocate a new TensorValue and copy the data from `array` into From cebbaa73b6b444e7a130a26445eb7dcb0d558052 Mon Sep 17 00:00:00 2001 From: Jared Roesch Date: Sat, 1 Dec 2018 00:21:03 -0800 Subject: [PATCH 28/28] Clean up whitespace --- docs/api/python/relay/build_module.rst | 3 --- docs/api/python/relay/frontend.rst | 2 -- 2 files changed, 5 deletions(-) diff --git a/docs/api/python/relay/build_module.rst b/docs/api/python/relay/build_module.rst index 15215d68bc33..a278940f0fd5 100644 --- a/docs/api/python/relay/build_module.rst +++ b/docs/api/python/relay/build_module.rst @@ -17,6 +17,3 @@ tvm.relay.build_module .. autoclass:: tvm.relay.build_module.GraphExecutor :members: - - - diff --git a/docs/api/python/relay/frontend.rst b/docs/api/python/relay/frontend.rst index 2ed10b9c27d8..a418e042bf3d 100644 --- a/docs/api/python/relay/frontend.rst +++ b/docs/api/python/relay/frontend.rst @@ -5,5 +5,3 @@ tvm.relay.frontend .. automodule:: tvm.relay.frontend .. autofunction:: tvm.relay.frontend.from_mxnet - -
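As a closing illustration of how the pieces documented in this patch series fit together, here is a minimal usage sketch. It is not part of the patches themselves, and it assumes that :code:`relay.var`, :code:`relay.add`, :code:`relay.Function`, and :code:`create_executor` behave as described in the docstrings above.

.. code-block:: python

    # Minimal end-to-end sketch (illustrative; assumes the API documented
    # above). Builds the Relay function f(x, y) = x + y and evaluates it
    # with the interpreter-backed "debug" executor.
    import numpy as np
    import tvm
    from tvm import relay
    from tvm.relay import build_module

    x = relay.var("x", shape=(2, 2), dtype="float32")
    y = relay.var("y", shape=(2, 2), dtype="float32")
    func = relay.Function([x, y], relay.add(x, y))

    executor = build_module.create_executor(kind="debug", ctx=tvm.cpu(0),
                                            target="llvm")
    add = executor.evaluate(func)

    result = add(np.ones((2, 2), dtype="float32"),
                 np.ones((2, 2), dtype="float32"))
    print(result)  # a (2, 2) tensor value filled with 2.0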