From 8b3f19839f0ed1d8cdbd6f83bf034dc2ae1cadc8 Mon Sep 17 00:00:00 2001 From: Krzysztof Parzyszek Date: Wed, 4 Oct 2023 11:10:40 -0700 Subject: [PATCH] [Unity][Fix] Remove duplicated words from comments, NFC Removed instances of accidentally repeated words from comments. There are cases where duplicated words appear legitimately, those cases remain unmodified. --- include/tvm/relax/transform.h | 2 +- python/tvm/relax/frontend/nn/op.py | 2 +- python/tvm/relax/op/linear_algebra.py | 4 ++-- python/tvm/relax/struct_info.py | 4 ++-- python/tvm/relax/training/utils.py | 2 +- python/tvm/relax/transform/transform.py | 8 ++++++-- python/tvm/runtime/disco/process_pool.py | 2 +- src/relax/op/tensor/linear_algebra.h | 2 +- src/relax/training/utils.h | 2 +- src/relax/transform/fuse_ops.cc | 2 +- src/relax/transform/static_plan_block_memory.cc | 2 +- tests/python/relax/test_transform_fold_constant.py | 2 +- 12 files changed, 19 insertions(+), 15 deletions(-) diff --git a/include/tvm/relax/transform.h b/include/tvm/relax/transform.h index 45a31b0911e2..78d835a4420f 100644 --- a/include/tvm/relax/transform.h +++ b/include/tvm/relax/transform.h @@ -503,7 +503,7 @@ TVM_DLL Pass DecomposeOpsForTraining(Optional func_name); * pass inserts the layout transformations in the call sites of PrimFuncs being replaced to * transform i/o buffers into expected layout. * - * \param op_impl_map Map from from kOperatorName attr (e.g., relax.conv2d) to replacement PrimFunc + * \param op_impl_map Map from kOperatorName attr (e.g., relax.conv2d) to replacement PrimFunc * \param op_buffer_transforms Map from kOperatorName attr to layout transformations on each of the * PrimFunc i/o buffers. 
* \param axis_separators Map from kOperatorName attr to axis_separators of each buffer_transforms diff --git a/python/tvm/relax/frontend/nn/op.py b/python/tvm/relax/frontend/nn/op.py index 8eafa80802d7..3e7b9d6bb2a3 100644 --- a/python/tvm/relax/frontend/nn/op.py +++ b/python/tvm/relax/frontend/nn/op.py @@ -256,7 +256,7 @@ def matmul(a: Tensor, b: Tensor, out_dtype: Optional[str] = None, name: str = "m out_dtype: Optional[Union[str, DataType]] The data type of the matmul result. - When it is not specified, the output dtype will be the the same as input dtype. + When it is not specified, the output dtype will be the same as input dtype. name : str Name hint. diff --git a/python/tvm/relax/op/linear_algebra.py b/python/tvm/relax/op/linear_algebra.py index e7e609937d27..efb5085c7882 100644 --- a/python/tvm/relax/op/linear_algebra.py +++ b/python/tvm/relax/op/linear_algebra.py @@ -41,7 +41,7 @@ def matmul(x1: Expr, x2: Expr, out_dtype: Optional[Union[str, DataType]] = None) out_dtype: Optional[Union[str, DataType]] The data type of the matmul result. - When it is not specified, the output dtype will be the the same as input dtype. + When it is not specified, the output dtype will be the same as input dtype. Returns ------- @@ -72,7 +72,7 @@ def linear( out_dtype: Optional[Union[str, DataType]] The data type of the matmul result. - When it is not specified, the output dtype will be the the same as input dtype. + When it is not specified, the output dtype will be the same as input dtype. Notes ----- diff --git a/python/tvm/relax/struct_info.py b/python/tvm/relax/struct_info.py index 4f89dc158c82..34a9d82595d1 100644 --- a/python/tvm/relax/struct_info.py +++ b/python/tvm/relax/struct_info.py @@ -148,7 +148,7 @@ class TensorStructInfo(StructInfo): dtype : Optional[str] The content data type. - vdevice : Optional[VDevice] + vdevice : Optional[VDevice] The virtual device.
ndim : Optional[int] @@ -247,7 +247,7 @@ def opaque_func( Parameters ---------- ret: Optional[StructInfo] - The struct info of the the function return value. + The struct info of the function return value. derive_func: Optional[EnvFunc] The environment function used for derivation diff --git a/python/tvm/relax/training/utils.py b/python/tvm/relax/training/utils.py index bf9e937457b6..4d1a32177227 100644 --- a/python/tvm/relax/training/utils.py +++ b/python/tvm/relax/training/utils.py @@ -90,7 +90,7 @@ def backbone_loss(input_instances, parameters, states, targets): Specify the number of `prediction_outputs` of the backbone function. Default: 1. new_func_name : Optional[str] - Specify the name of the appended result. If is is not specified, the name will be + Specify the name of the appended result. If it is not specified, the name will be `func_name + "_loss"`. Returns diff --git a/python/tvm/relax/transform/transform.py b/python/tvm/relax/transform/transform.py index 1676ba18c1f4..028a0e04d4f2 100644 --- a/python/tvm/relax/transform/transform.py +++ b/python/tvm/relax/transform/transform.py @@ -448,8 +448,12 @@ def BindParams( func_name: str The function name to be bound - params : Dict[Union[str,relax.Var],Union[tvm.runtime.NDArray, np.ndarray]] - The map from parameter or parameter name name to constant + params : Dict[ + Union[str,relax.Var], + Union[tvm.runtime.NDArray, np.ndarray], + ] + + The map from parameter or parameter name to constant tensors. 
Returns diff --git a/python/tvm/runtime/disco/process_pool.py b/python/tvm/runtime/disco/process_pool.py index fd4ba7a165aa..836744dba64d 100644 --- a/python/tvm/runtime/disco/process_pool.py +++ b/python/tvm/runtime/disco/process_pool.py @@ -165,7 +165,7 @@ def _kill_child_processes(pid): @register_func("runtime.disco.create_process_pool") def _create_process_pool(num_workers: int): - """Create a process pool where the workers' are are [1, num_workers).""" + """Create a process pool where the workers are [1, num_workers).""" pool = [DiscoPopenWorker(i, num_workers) for i in range(1, num_workers)] def result_func(worker_id: int): diff --git a/src/relax/op/tensor/linear_algebra.h b/src/relax/op/tensor/linear_algebra.h index 83deb02012fd..e0f091bb6176 100644 --- a/src/relax/op/tensor/linear_algebra.h +++ b/src/relax/op/tensor/linear_algebra.h @@ -38,7 +38,7 @@ namespace relax { * \param x1 The first input tensor. * \param x2 The second input tensor. * \param out_dtype The data type of the matmul result. - * When it is not specified, the output dtype will be the the same as input dtype. + * When it is not specified, the output dtype will be the same as input dtype. * \return The computed result. */ Expr matmul(Expr x1, Expr x2, DataType out_dtype); diff --git a/src/relax/training/utils.h b/src/relax/training/utils.h index 074aedc287cd..f280308f9d51 100644 --- a/src/relax/training/utils.h +++ b/src/relax/training/utils.h @@ -46,7 +46,7 @@ namespace transform { * \param loss_function The loss function. * \param num_backbone_outputs Specify the number of `prediction_outputs` of the backbone function. * Default: 1. - * \param new_func_name Specify the name of the appended result. If is is not specified, the name + * \param new_func_name Specify the name of the appended result. If it is not specified, the name * will be `func_name + "_loss"`. * \return The Pass.
*/ diff --git a/src/relax/transform/fuse_ops.cc b/src/relax/transform/fuse_ops.cc index a13d0830a01d..8a76935e9b93 100644 --- a/src/relax/transform/fuse_ops.cc +++ b/src/relax/transform/fuse_ops.cc @@ -446,7 +446,7 @@ class FunctionCreator : public ExprMutator { } /*! - * \brief Create the grouped function according according to the collected bindings and parameters + * \brief Create the grouped function according to the collected bindings and parameters * \param composite_name The name to identify the pattern this function is created from, if any. * It will become the value of the kComposite attribute of the created function. * \note The created function won't be returned immediately. It's stored in the `function_` field. diff --git a/src/relax/transform/static_plan_block_memory.cc b/src/relax/transform/static_plan_block_memory.cc index e6aa450ff8e8..ef2d582548b9 100644 --- a/src/relax/transform/static_plan_block_memory.cc +++ b/src/relax/transform/static_plan_block_memory.cc @@ -662,7 +662,7 @@ class StorageAllocator : public StorageAllocatorBaseVisitor { /*! * \brief Check if a token has no reference and thus can be released. And release it if so. * \param token The token to be checked. - * \param release_site The CallNode where the the input token is send for release. + * \param release_site The CallNode where the input token is send for release. * If the token is checked to release here, we keep record of the release site so that * kill_tensor can be inserted here at the rewrite stage. */ diff --git a/tests/python/relax/test_transform_fold_constant.py b/tests/python/relax/test_transform_fold_constant.py index a4dffba11443..9f2e3a4a092d 100644 --- a/tests/python/relax/test_transform_fold_constant.py +++ b/tests/python/relax/test_transform_fold_constant.py @@ -24,7 +24,7 @@ def gen_mod(mod, name, binding): - """Select relax function with name, rename to main and and bind constant. + """Select relax function with name, rename to main and bind constant. 
Parameters ----------