4 changes: 3 additions & 1 deletion cmake/modules/contrib/CMSISNN.cmake
@@ -18,6 +18,8 @@
if(USE_CMSISNN)
add_definitions(-DTVM_USE_CMSISNN)
message(STATUS "Build with CMSIS-NN support")
-tvm_file_glob(GLOB RELAY_CONTRIB_CMSISNN_SRCS src/relay/backend/contrib/cmsisnn/*.cc)
+tvm_file_glob(GLOB RELAY_CONTRIB_CMSISNN_SRCS
+src/relay/backend/contrib/cmsisnn/*.cc
+src/relay/backend/contrib/constant_transforms.cc)
list(APPEND COMPILER_SRCS ${RELAY_CONTRIB_CMSISNN_SRCS})
endif(USE_CMSISNN)
3 changes: 2 additions & 1 deletion cmake/modules/contrib/EthosN.cmake
@@ -35,7 +35,8 @@ if(NOT USE_ETHOSN STREQUAL "OFF")
list(APPEND RUNTIME_SRCS ${ETHOSN_RUNTIME_CONTRIB_SRC})

tvm_file_glob(GLOB COMPILER_ETHOSN_SRCS
-src/relay/backend/contrib/ethosn/*)
+src/relay/backend/contrib/ethosn/*
+src/relay/backend/contrib/constant_transforms.cc)
list(APPEND COMPILER_SRCS ${COMPILER_ETHOSN_SRCS})

list(APPEND TVM_LINKER_LIBS ${ETHOSN_COMPILER_LIBRARY}
20 changes: 4 additions & 16 deletions src/relay/backend/contrib/cmsisnn/generate_constants.cc
@@ -31,6 +31,7 @@
#include "../../../op/make_op.h"
#include "../../../qnn/utils.h"
#include "../../../transforms/pattern_utils.h"
#include "../constant_transforms.h"
#include "convolutions.h"

namespace tvm {
@@ -64,22 +65,9 @@ class GenerateConstantsMutator : public MixedModeMutator {
attrs->out_dtype = std::move(conv2d_attrs->out_dtype);
*new_attrs = tvm::Attrs{attrs};

-std::string kernel_layout = conv2d_attrs->kernel_layout.c_str();
-int pos_o = kernel_layout.find("O");
-int pos_h = kernel_layout.find("H");
-int pos_w = kernel_layout.find("W");
-int pos_i = kernel_layout.find("I");
-
-IRModule kernel_module;
-auto func_body = MakeTranspose(
-kernel_expr, {Integer(pos_o), Integer(pos_h), Integer(pos_w), Integer(pos_i)});
-auto kernel_func =
-Function(FreeVars(func_body), func_body, Type(), FreeTypeVars(func_body, kernel_module));
-GlobalVar kernel_var("main");
-kernel_module->Add(kernel_var, kernel_func);
-kernel_module = relay::transform::FoldConstant()(kernel_module);
-kernel_func = Downcast<Function>(kernel_module->Lookup("main"));
-return kernel_func->body;
+Constant conv2d_kernel = Downcast<Constant>(kernel_expr);
+conv2d_kernel = TransposeWeights(conv2d_kernel, conv2d_attrs->kernel_layout, "OHWI");
+return conv2d_kernel;
}

/*! * \brief Performs weight transpose and substitutes existing constants in the composite
58 changes: 58 additions & 0 deletions src/relay/backend/contrib/constant_transforms.cc
@@ -0,0 +1,58 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

#include "constant_transforms.h"

#include <string>

#include "../../transforms/pattern_utils.h"
#include "../../transforms/simplify_expr.h"

/*!
* \file src/relay/backend/contrib/constant_transforms.cc
* \brief Transforms applied to constant operations during codegen for BYOC backends.
*/

namespace tvm {
namespace relay {
namespace contrib {

Expr FoldConstantExpr(const Expr& expr, bool fold_qnn) {
auto mod = IRModule::FromExpr(expr);
mod = transform::FoldConstant(fold_qnn)(mod);
auto entry_func = Downcast<Function>(mod->Lookup("main"));
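// If the input was a bare expression, return just the folded body; otherwise return the whole folded function.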
return expr.as<FunctionNode>() == nullptr ? entry_func->body : entry_func;
}

Constant TransposeWeights(const Constant& data, const std::string& source_layout,
const std::string& target_layout) {
Array<Integer> transpose_matrix;
for (const char& c : target_layout) {
int pos = source_layout.find(c);
transpose_matrix.push_back(pos);
}
Expr transpose = MakeTranspose(data, transpose_matrix);
transpose = InferType(FoldConstantExpr(transpose));
Constant transposed_data = Downcast<Constant>(transpose);
return transposed_data;
}

} // namespace contrib
} // namespace relay
} // namespace tvm
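
Note: `TransposeWeights` derives the transpose axes by looking up, for each axis letter of the target layout, that letter's position in the source layout. A minimal standalone sketch of the same lookup (plain C++; `LayoutPermutation` is an illustrative name, not part of this patch):

```cpp
#include <cassert>
#include <string>
#include <vector>

// For each axis letter in the target layout, find its index in the source
// layout; the resulting vector is the axes argument for a transpose.
std::vector<int> LayoutPermutation(const std::string& source, const std::string& target) {
  std::vector<int> perm;
  for (char axis : target) {
    perm.push_back(static_cast<int>(source.find(axis)));
  }
  return perm;
}

int main() {
  // "HWIO" -> "OHWI": O sits at index 3 of the source, H at 0, W at 1, I at 2.
  assert((LayoutPermutation("HWIO", "OHWI") == std::vector<int>{3, 0, 1, 2}));
  // "OI" -> "IO", as used for the fully connected weights later in this diff.
  assert((LayoutPermutation("OI", "IO") == std::vector<int>{1, 0}));
  return 0;
}
```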
59 changes: 59 additions & 0 deletions src/relay/backend/contrib/constant_transforms.h
@@ -0,0 +1,59 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

/*!
* \file src/relay/backend/contrib/constant_transforms.h
* \brief Transforms applied to constant operations during codegen for BYOC backends.
*/

#ifndef TVM_RELAY_BACKEND_CONTRIB_CONSTANT_TRANSFORMS_H_
#define TVM_RELAY_BACKEND_CONTRIB_CONSTANT_TRANSFORMS_H_

#include <tvm/relay/expr.h>

#include <string>

namespace tvm {
namespace relay {
namespace contrib {

/*!
* \brief Apply constant folding on an expression.
*
* \param expr The expression to fold.
* \param fold_qnn Whether to fold constants for QNN operations.
* \returns The new folded expression.
*/
Expr FoldConstantExpr(const Expr& expr, bool fold_qnn = true);

/*!
* \brief Transpose weights from `source_layout` to `target_layout`.
*
* \param data The constant expression to transpose.
* \param source_layout The current layout of the constant, e.g. "OHWI".
* \param target_layout The target layout of the constant, e.g. "HWIO".
* \returns The transposed constant.
*/
Constant TransposeWeights(const Constant& data, const std::string& source_layout,
const std::string& target_layout);

} // namespace contrib
} // namespace relay
} // namespace tvm

#endif // TVM_RELAY_BACKEND_CONTRIB_CONSTANT_TRANSFORMS_H_
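
A hedged usage sketch of this header from a hypothetical backend pass (`PreprocessConv2dWeights` and the OIHW starting layout are illustrative; the call shape mirrors the QnnConv2dTranspose usage later in this diff):

```cpp
#include <string>

#include "constant_transforms.h"

namespace tvm {
namespace relay {
namespace contrib {

// Illustrative helper: bring conv2d weights into the HWIO layout a backend
// expects, relying on TransposeWeights to fold the transpose at compile time.
Constant PreprocessConv2dWeights(const Constant& weights, const std::string& kernel_layout) {
  if (kernel_layout == "HWIO") {
    return weights;  // already in the target layout
  }
  return TransposeWeights(weights, kernel_layout, "HWIO");
}

}  // namespace contrib
}  // namespace relay
}  // namespace tvm
```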
4 changes: 2 additions & 2 deletions src/relay/backend/contrib/ethosn/codegen.cc
@@ -412,8 +412,8 @@ EthosnError ConstructNetworkVisitor::MakeFullyConnectedLayer(const Call& call,
return err;
}

-auto weights = AddConstant(network_, params.weights_info, params.raw_weights).tensor;
-auto bias = AddConstant(network_, params.bias_info, params.raw_bias).tensor;
+auto weights = AddConstant(network_, params.weights_info, params.raw_weights->data).tensor;
+auto bias = AddConstant(network_, params.bias_info, params.raw_bias->data).tensor;
try {
auto input =
AddReshape(network_, *operand_table_[call->args[0]][0], params.input_info.m_Dimensions)
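Note: with `raw_weights` and `raw_bias` now held as `runtime::NDArray` (see the ethosn_api.h change below), `->data` dereferences the NDArray to its underlying `DLTensor`, whose `data` member is the raw buffer pointer that `AddConstant` previously received directly.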
1 change: 1 addition & 0 deletions src/relay/backend/contrib/ethosn/convert_equivalent.cc
@@ -32,6 +32,7 @@
#include "../../../qnn/utils.h"
#include "../../../transforms/pattern_utils.h"
#include "../../../transforms/simplify_expr.h"
#include "../constant_transforms.h"
#include "ethosn_api.h"

namespace tvm {
49 changes: 17 additions & 32 deletions src/relay/backend/contrib/ethosn/ethosn_api.cc
@@ -41,6 +41,7 @@
#include "../../../op/make_op.h"
#include "../../../transforms/pattern_utils.h"
#include "../../../transforms/simplify_expr.h"
#include "../constant_transforms.h"
#include "ethosn_support_library/Support.hpp"
#include "ethosn_support_library/SupportQueries.hpp"
#include "tvm/relay/qnn/attrs.h"
@@ -197,7 +198,10 @@ EthosnError EthosnAPI::QnnFullyConnected(const Expr& expr, FullyConnectedParams*
sl::QuantizationInfo output_q_info;
err += Tvm2Npu(input_zero_point, input_scale, &data_q_info);
err += Tvm2Npu(kernel_zero_point, kernel_scale, &weights_q_info);
-err += Tvm2Npu(0, data_q_info.GetScale() * weights_q_info.GetScale(), &bias_q_info);
+std::valarray<float> bias_scales = data_q_info.GetScale() * weights_q_info.GetScales();
+const int bias_zero_point = 0;
+const unsigned int bias_axis = 3;
+err += Tvm2Npu(bias_zero_point, bias_scales, bias_axis, &bias_q_info);
err += Tvm2Npu(output_zero_point, output_scale, &output_q_info);

// Create fc info
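
Note: the bias quantization above now supports per-channel weight scales: each output channel's bias scale is the input scale times that channel's weight scale, with the quantization axis set to 3, the output-channel axis of the {1, 1, 1, O} bias tensor. A minimal sketch of the element-wise product, assuming the same `std::valarray` representation (`BiasScales` is an illustrative name):

```cpp
#include <valarray>

// bias_scale[c] = input_scale * weight_scale[c], one entry per output channel.
std::valarray<float> BiasScales(float input_scale, const std::valarray<float>& weight_scales) {
  return input_scale * weight_scales;  // scalar * valarray multiplies element-wise
}
```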
@@ -213,27 +217,30 @@
data_data_type, sl::DataFormat::NHWC, data_q_info);

// Create weights info
-const auto* weights_dtype = dense->args[1]->checked_type().as<TensorTypeNode>();
+Constant weights_data = Downcast<Constant>(dense->args[1]);
+weights_data = TransposeWeights(weights_data, "OI", "IO");
+const auto* weights_ttype = weights_data->checked_type().as<TensorTypeNode>();
sl::TensorShape weights_tensor_shape;
sl::DataType weights_data_type;
sl::DataFormat weights_data_format;
// Ignore the error here because weights don't have a batch axis
-Tvm2Npu(weights_dtype->shape, &weights_tensor_shape);
-err += Tvm2Npu(weights_dtype->dtype, &weights_data_type);
+Tvm2Npu(weights_ttype->shape, &weights_tensor_shape);
+err += Tvm2Npu(weights_ttype->dtype, &weights_data_type);
err += Tvm2Npu("HWIO", &weights_data_format);
-params->weights_info = sl::TensorInfo({1, 1, weights_tensor_shape[1], weights_tensor_shape[0]},
+// Weights tensor shape is 1, 1, I, O
+params->weights_info = sl::TensorInfo({1, 1, weights_tensor_shape[0], weights_tensor_shape[1]},
weights_data_type, weights_data_format, weights_q_info);
-params->raw_weights = dense->args[1].as<ConstantNode>()->data->data;
+params->raw_weights = weights_data->data;

// Create bias info
params->bias_info =
-sl::TensorInfo({1, 1, 1, weights_tensor_shape[0]}, sl::DataType::INT32_QUANTIZED,
+sl::TensorInfo({1, 1, 1, weights_tensor_shape[1]}, sl::DataType::INT32_QUANTIZED,
sl::DataFormat::NHWC, bias_q_info);
-params->raw_bias = bias_add->args[1].as<ConstantNode>()->data->data;
+params->raw_bias = bias_add->args[1].as<ConstantNode>()->data;

sl::TensorInfo output_tensor_info;
err += Tvm2Npu(requantize->checked_type(), &output_tensor_info);
-output_tensor_info.m_Dimensions = {data_tensor_shape[0], 1, 1, weights_tensor_shape[0]};
+output_tensor_info.m_Dimensions = {data_tensor_shape[0], 1, 1, weights_tensor_shape[1]};
output_tensor_info.m_QuantizationInfo = output_q_info;
params->output_info = output_tensor_info;
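
Worked example of the shape changes above, assuming a dense layer with I = 128 inputs and O = 10 outputs: the OI weight constant has shape (10, 128); after `TransposeWeights(weights_data, "OI", "IO")` it is (128, 10), so `weights_tensor_shape[0]` is I and `weights_tensor_shape[1]` is O. The Support Library then sees weights info {1, 1, 128, 10} ({1, 1, I, O}), bias info {1, 1, 1, 10}, and an output of {N, 1, 1, 10}.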

@@ -449,21 +456,6 @@ EthosnError EthosnAPI::Mean(const Expr& expr, MeanParams* params) {
return err;
}

-Constant TransposeWeights(const Constant& data, const std::string& input_layout) {
-int pos_h = input_layout.find("H");
-int pos_w = input_layout.find("W");
-int pos_i = input_layout.find("I");
-int pos_o = input_layout.find("O");
-
-// Currently the expected target layout is HWIO only.
-Array<Integer> target_shape = {pos_h, pos_w, pos_i, pos_o};
-
-Expr transpose = MakeTranspose(data, target_shape);
-transpose = InferType(FoldConstantExpr(transpose));
-Constant transposed_data = Downcast<Constant>(transpose);
-return transposed_data;
-}
-
EthosnError EthosnAPI::QnnConv2dTranspose(const Expr& expr, QnnConv2dTransposeParams* params) {
Call requantize = Downcast<Call>(expr);
Call bias;
@@ -530,7 +522,7 @@ EthosnError EthosnAPI::QnnConv2dTranspose(const Expr& expr, QnnConv2dTransposePa
// Create weights info
Constant weights_data = Downcast<Constant>(conv2d_transpose->args[1]);
if (conv_attr->kernel_layout != "HWIO") {
-weights_data = TransposeWeights(weights_data, conv_attr->kernel_layout);
+weights_data = TransposeWeights(weights_data, conv_attr->kernel_layout, "HWIO");
}
const auto* weights_ttype = weights_data->checked_type().as<TensorTypeNode>();
sl::TensorShape weights_tensor_shape;
@@ -1080,13 +1072,6 @@ EthosnError EthosnAPI::AsConstant(const Expr& expr, T* out) {
return EthosnError();
}

-Expr FoldConstantExpr(const Expr& expr, bool fold_qnn) {
-auto mod = IRModule::FromExpr(expr);
-mod = transform::FoldConstant(fold_qnn)(mod);
-auto entry_func = Downcast<Function>(mod->Lookup("main"));
-return expr.as<FunctionNode>() == nullptr ? entry_func->body : entry_func;
-}
-
} // namespace ethosn
} // namespace contrib
} // namespace relay
13 changes: 2 additions & 11 deletions src/relay/backend/contrib/ethosn/ethosn_api.h
@@ -66,8 +66,8 @@ struct FullyConnectedParams {
sl::TensorInfo weights_info;
sl::TensorInfo bias_info;
sl::TensorInfo output_info;
-void* raw_weights = nullptr;
-void* raw_bias = nullptr;
+runtime::NDArray raw_weights;
+runtime::NDArray raw_bias;
};

struct MaxPool2DParams {
Expand Down Expand Up @@ -324,15 +324,6 @@ class EthosnAPI {
static EthosnError AsConstant(const Expr& expr, std::valarray<float>* out);
};

-/*!
-* \brief Apply constant folding on an expression.
-*
-* \param expr The expression to fold.
-* \param fold_qnn Whether to fold constants for QNN operations.
-* \returns The new folded expression.
-*/
-Expr FoldConstantExpr(const Expr& expr, bool fold_qnn = true);
-
} // namespace ethosn
} // namespace contrib
} // namespace relay