153 commits
15611c1
Fix build_ccache_wrappers:
larroy Apr 5, 2019
7711a0b
Optimize move semantics of NodeEntry
Feb 8, 2019
3294d84
Compile fix
Feb 8, 2019
9e4a084
Fix
Feb 10, 2019
cc08215
Fix crash, uninitialized Node ptr
Feb 11, 2019
092c952
Fix
Feb 11, 2019
6e1764d
Fix autograd crash
larroy Mar 15, 2019
59abd4d
Fix clang tidy errors
larroy Apr 10, 2019
0679ca2
Restore subrepo
larroy Apr 10, 2019
33e99db
restore tvm
larroy Apr 10, 2019
5f8c4b8
Update tvm
larroy Apr 10, 2019
3d955eb
Update tvm
larroy Apr 11, 2019
b85b9a6
Update NDarray with NodeEntry constructors and refine initializer lists
larroy Apr 11, 2019
aeec306
Fix lint
larroy Apr 11, 2019
778902f
fix
larroy Apr 12, 2019
fed230b
Revert "Restore subrepo"
larroy Apr 12, 2019
71e730c
Update tvm to my repo
larroy Apr 12, 2019
5fe3398
Fix
larroy Apr 12, 2019
331a324
Fix
larroy Apr 12, 2019
e4f8447
Fix
larroy Apr 12, 2019
b5fbc3f
Fix
larroy Apr 12, 2019
3435814
minor
larroy Apr 13, 2019
23bc3df
Fixes
larroy Apr 13, 2019
8180054
update tvm and dmlc-core
larroy Apr 13, 2019
0e1c99c
Update tvm
larroy Apr 13, 2019
2c7d653
Revert "update dmlc-core"
larroy Apr 11, 2019
d2b4eeb
Update dmlc-core and tvm
larroy Apr 15, 2019
6914fa5
Improve batch_norm with NodeEntry refactorings
larroy Apr 16, 2019
173ac2e
Update tvm
larroy Apr 16, 2019
4fc969c
Fix bug introduced in batch_norm
larroy Apr 16, 2019
675c4b7
lint
larroy Apr 17, 2019
575b62c
Add std::move
larroy Apr 17, 2019
4d4c062
Fix bugs
larroy Apr 17, 2019
176c910
Fix bug introduced in batch_norm
larroy Apr 17, 2019
423fc64
Update tvm
larroy Apr 17, 2019
7c63456
Update tvm
larroy Apr 17, 2019
3895aac
Sync gradient.cc with tvm
larroy Apr 17, 2019
81a714b
Fix bug
larroy Apr 17, 2019
412787a
Use at
larroy Apr 17, 2019
c49821f
update tvm
larroy Apr 17, 2019
a0da596
lint
larroy Apr 17, 2019
4e5233a
Update subrepos
larroy Apr 17, 2019
ed7a00d
update tvm
larroy Apr 17, 2019
c39afa4
Fix moves leaving nodes uninitialized
larroy Apr 19, 2019
0f55576
update tvm
larroy Apr 19, 2019
cfa3d8b
update tvm
larroy Apr 19, 2019
5778813
restore gitmodules
larroy Apr 24, 2019
b8b47b4
restore tvm
larroy Apr 24, 2019
a0c3b14
Revert "Fix build_ccache_wrappers:"
larroy Apr 25, 2019
2bfc51c
readability
larroy Apr 19, 2019
a173495
Add checks to gradient.cc
larroy Apr 23, 2019
f268a98
update tvm
larroy Apr 29, 2019
47c9d6e
Add backward to fully connected. (_backward_FullyConnected)
larroy Apr 23, 2019
05bb7ea
Add failing test
larroy Apr 24, 2019
c2b4412
CR
larroy Apr 24, 2019
b26ffe1
Add a test for FC grad
larroy Apr 24, 2019
4ef42d2
Fix test
larroy Apr 24, 2019
1277925
sync subrepos
larroy Apr 24, 2019
a5ad61d
Fix lint
larroy Apr 24, 2019
e118a8b
Check for null ptr in NDarray members
larroy Apr 25, 2019
f3feab5
uint32_t -> size_t
larroy Apr 25, 2019
3f74906
Fix warning in fully connected backward^2
larroy Apr 25, 2019
6eeedde
Refactor Backward
larroy Apr 27, 2019
6d2e18d
Refactor Imperative::Backward
larroy Apr 27, 2019
d68728a
Update tvm to my repo
larroy Apr 29, 2019
a600564
udpate tvm
larroy Apr 29, 2019
bb9123d
update tvm
larroy Apr 29, 2019
24fb41e
Remove unnecesary ctor call of NodeEntry
larroy Apr 29, 2019
d4cfb5f
Fix emplace_back
larroy Apr 29, 2019
f6def81
update tvm
larroy Apr 29, 2019
2472435
Refactor CreateGradientVariableNodes
larroy Apr 29, 2019
f8a9c4b
Fix errors and warnings
larroy Apr 29, 2019
5a1041d
Improve documentation for auxiliary backward functions
larroy Apr 29, 2019
dde42b9
gvar -> gvars
larroy Apr 29, 2019
2adbdfd
Fix lint
larroy Apr 29, 2019
0a419b0
Fix bug
larroy Apr 30, 2019
82de0b1
update tvm
larroy Apr 30, 2019
ab1c6bc
Fix lint
larroy Apr 30, 2019
696b25f
remove -Wextra
larroy May 1, 2019
8d935f2
Add op name to graph nodes
larroy May 1, 2019
b1c268b
Fix build_ccache_wrappers:
larroy Apr 5, 2019
7326632
Optimize move semantics of NodeEntry
Feb 8, 2019
d81b79b
Compile fix
Feb 8, 2019
937f1a7
Fix
Feb 10, 2019
edf6ecd
Fix crash, uninitialized Node ptr
Feb 11, 2019
f72c29c
Fix
Feb 11, 2019
66171ed
Fix autograd crash
larroy Mar 15, 2019
18bef66
Fix clang tidy errors
larroy Apr 10, 2019
d941f0f
Restore subrepo
larroy Apr 10, 2019
1b13551
restore tvm
larroy Apr 10, 2019
e4b152e
Update tvm
larroy Apr 10, 2019
6f57297
Update tvm
larroy Apr 11, 2019
bc95ea4
Update NDarray with NodeEntry constructors and refine initializer lists
larroy Apr 11, 2019
d026a0e
Fix lint
larroy Apr 11, 2019
b6fab77
fix
larroy Apr 12, 2019
91b99e8
Revert "Restore subrepo"
larroy Apr 12, 2019
69c6349
Update tvm to my repo
larroy Apr 12, 2019
8fc23fe
Fix
larroy Apr 12, 2019
cdc0896
Fix
larroy Apr 12, 2019
b726112
Fix
larroy Apr 12, 2019
d6bee4f
Fix
larroy Apr 12, 2019
deb9e2d
minor
larroy Apr 13, 2019
cdce4bf
Fixes
larroy Apr 13, 2019
9fb0b68
update tvm and dmlc-core
larroy Apr 13, 2019
2d6c4d0
Update tvm
larroy Apr 13, 2019
0412d07
Revert "update dmlc-core"
larroy Apr 11, 2019
a9ab18a
Update dmlc-core and tvm
larroy Apr 15, 2019
75397cb
Improve batch_norm with NodeEntry refactorings
larroy Apr 16, 2019
15cc946
Update tvm
larroy Apr 16, 2019
90604bb
Fix bug introduced in batch_norm
larroy Apr 16, 2019
9428db9
lint
larroy Apr 17, 2019
674149a
Add std::move
larroy Apr 17, 2019
d56d26e
Fix bugs
larroy Apr 17, 2019
f0d1812
Fix bug introduced in batch_norm
larroy Apr 17, 2019
7dfb1db
Update tvm
larroy Apr 17, 2019
992f54a
Update tvm
larroy Apr 17, 2019
dac9a33
Sync gradient.cc with tvm
larroy Apr 17, 2019
f7c2b9c
Fix bug
larroy Apr 17, 2019
6d4eae4
Use at
larroy Apr 17, 2019
325591e
update tvm
larroy Apr 17, 2019
7c15e68
lint
larroy Apr 17, 2019
8b328fe
Update subrepos
larroy Apr 17, 2019
b661b0d
update tvm
larroy Apr 17, 2019
982044c
Fix moves leaving nodes uninitialized
larroy Apr 19, 2019
abc857b
update tvm
larroy Apr 19, 2019
6016e2c
update tvm
larroy Apr 19, 2019
25d9fac
restore gitmodules
larroy Apr 24, 2019
3380e09
restore tvm
larroy Apr 24, 2019
bea00f5
Revert "Fix build_ccache_wrappers:"
larroy Apr 25, 2019
bf7cd9a
readability
larroy Apr 19, 2019
f55be15
Add checks to gradient.cc
larroy Apr 23, 2019
3ddd917
update tvm
larroy Apr 29, 2019
7d4ad83
fix clojure tests
gigasquid May 3, 2019
59102de
Merge pull request #2 from gigasquid/fix-clojure-test-for-pr-14095
larroy May 3, 2019
0429665
Merge remote-tracking branch 'origin/node_ptr' into fc_higher_order_g…
larroy May 3, 2019
1be6c4d
Fix warning
larroy May 6, 2019
beeea60
Minor refactor
larroy May 13, 2019
f11e36b
Add test for second order gradient
larroy May 14, 2019
6d2788b
Merge remote-tracking branch 'upstream/master' into fc_higher_order_g…
larroy May 20, 2019
27c9131
CR
larroy Jun 5, 2019
a134039
Merge remote-tracking branch 'upstream/master' into fc_higher_order_g…
larroy Jun 5, 2019
67eb0b1
pep8
larroy Jun 5, 2019
74ec09f
Remove test
larroy Jun 5, 2019
0afab87
Restored submodules
larroy Jun 5, 2019
6d270e6
Merge remote-tracking branch 'upstream/master' into fc_higher_order_g…
larroy Jul 24, 2019
ba1d307
Merge remote-tracking branch 'upstream/master' into fc_higher_order_g…
larroy Jul 25, 2019
5d36187
Fix build
larroy Jul 25, 2019
a551b2d
Fix dev_menu
larroy Jul 25, 2019
79a9ba6
CR
larroy Jul 25, 2019
467fd0f
rename NDArray.entry_ to NDArray.autograd_
larroy Jul 25, 2019
0f262a7
Fix lint
larroy Jul 25, 2019
9fcb1f1
Merge remote-tracking branch 'upstream/master' into fc_higher_order_g…
larroy Aug 23, 2019
84a7654
CR comments, validate grad_req is in range
larroy Aug 23, 2019
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -143,7 +143,7 @@ else(MSVC)
add_definitions(-DMSHADOW_USE_F16C=0)
endif()
set(CMAKE_POSITION_INDEPENDENT_CODE ON)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wno-unknown-pragmas -Wno-sign-compare")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -Wno-unknown-pragmas -Wno-sign-compare -Werror=return-type")
if ("${CMAKE_CXX_COMPILER_ID}" MATCHES ".*Clang$")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-braced-scalar-init")
endif()
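
As context for the new flag: -Werror=return-type promotes the "control reaches end of non-void function" warning into a hard error. A minimal, hypothetical snippet (not part of this patch) that GCC/Clang would now reject:

// Hypothetical example: with -Werror=return-type this no longer compiles,
// instead of silently invoking undefined behaviour when x == 0.
int Sign(int x) {
  if (x > 0) return 1;
  if (x < 0) return -1;
}  // error: control reaches end of non-void function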
2 changes: 1 addition & 1 deletion include/mxnet/c_api.h
@@ -1224,7 +1224,7 @@ MXNET_DLL int MXAutogradBackward(mx_uint num_output,
* \param output_handles output NDArrays
* \param ograd_handles head gradient for NDArrays
* \param num_variables number of variables
* \param
* \param var_handles variables to compute gradient with respect to (d / d var)
* \param retain_graph whether to keep the graph after backward
* \param is_train whether to do backward for training or inference
* \return 0 when success, -1 when failure happens
107 changes: 67 additions & 40 deletions include/mxnet/imperative.h
@@ -35,48 +35,53 @@
#include "./ndarray.h"

namespace mxnet {
/*! \brief runtime functions for NDArray */
class Imperative {
/*!
* Autograd info stored per node in nnvm::Node::info
*/
class AGInfo {
public:
/*! \brief */
class AGInfo {
public:
Context ctx;
OpReqType grad_req;
OpStatePtr state;
std::vector<NDArray> outputs;
std::vector<NDArray> out_grads;
bool fresh_out_grad;
Context ctx;
OpReqType grad_req;
OpStatePtr state;
std::vector<NDArray> outputs;
std::vector<NDArray> out_grads;
bool fresh_out_grad;

AGInfo() :
AGInfo() :
grad_req(kNullOp), fresh_out_grad(false) {}

static void Clear(const nnvm::NodePtr& node) {
if (node == nullptr || node->info.empty()) return;
AGInfo& info = Get(node);
if (info.grad_req != kNullOp) return;
node->info.clear();
}
static void Clear(const nnvm::NodePtr& node) {
if (node == nullptr || node->info.empty()) return;
AGInfo& info = Get(node);
if (info.grad_req != kNullOp) return;
node->info.clear();
}

static AGInfo& Get(const nnvm::NodePtr& node) {
return dmlc::get<AGInfo>(node->info);
}
static AGInfo& Get(const nnvm::NodePtr& node) {
return dmlc::get<AGInfo>(node->info);
}

static AGInfo& Create(const nnvm::NodePtr& node) {
node->info.construct<AGInfo>();
return Get(node);
}
static AGInfo& Create(const nnvm::NodePtr& node) {
node->info.construct<AGInfo>();
return Get(node);
}

static bool IsNone(const NDArray& arr) {
return arr.entry_.node == nullptr || arr.entry_.node->info.empty();
}
static bool IsNone(const NDArray& arr) {
return arr.autograd_.node == nullptr || arr.autograd_.node->info.empty();
}

static bool IsVariable(const nnvm::NodePtr& node) {
AGInfo& info = Get(node);
return info.grad_req != kNullOp && info.outputs.size() == 1
&& info.out_grads.size() == 1;
}
};

/*! \brief runtime functions for NDArray */
class Imperative {
public:
/*! \brief */

static bool IsVariable(const nnvm::NodePtr& node) {
AGInfo& info = Get(node);
return info.grad_req != kNullOp && info.outputs.size() == 1
&& info.out_grads.size() == 1;
}
};
/*! \brief whether operator recording is on. */
bool is_training() const {
return is_train_;
@@ -97,11 +102,11 @@ class Imperative {
is_recording_ = is_recording;
return old;
}
/*! brief whether numpy compatibility is on. */
/*! \brief whether numpy compatibility is on. */
bool is_np_shape() const {
return is_np_shape_;
}
/*! brief turn on or turn off numpy compatibility switch. */
/*! \brief turn on or turn off numpy compatibility switch. */
bool set_is_np_shape(bool is_np_shape) {
bool old = is_np_shape_;
is_np_shape_ = is_np_shape;
@@ -160,17 +165,39 @@

private:
friend class NDArray;
/*! \brief make constructor protected. */
/*! Create a forward graph from the given outputs
* @param outputs source ndarrays to use as graph outputs
* @return the forward graph
*/
static nnvm::Graph CreateGraph(const std::vector<NDArray *> &outputs);
/*! Create gradient nodes using output shapes and ctx.
* Gradient heads are initialized to 1 if they are not present (nullptr)
* @return vector of nodes
*/
static std::vector<nnvm::NodeEntry> CreateHeadGradientNodes(const std::vector<NDArray*>& outputs,
const std::vector<NDArray*>& ograds);

struct GradientVariableNodes;
/*! Create variable nodes.
* If variables is provided, gradient nodes are created for them. Otherwise it uses read-only
* inputs reachable from the outputs.
* @param variables variables to compute gradients for (may be empty)
* @param outputs output node entries of the forward graph
* @return aux data structure with nodes and arrays for gradients
*/
GradientVariableNodes CreateGradientVariableNodes(const std::vector<NDArray*>& variables,
const std::vector<nnvm::NodeEntry>& outputs);
Imperative() {
if (PreferBulkExecTrain())
backward_bulk_size_ = BulkExecMaxNodeTrainBwd();
}
/*! \brief find the input/output ndarrays that are needed for backward */
void GetBackwardDependency(
const nnvm::NodePtr& node,
uint32_t num_inputs, uint32_t num_outputs,
std::vector<bool> *p_save_inputs,
std::vector<bool> *p_save_outputs);
size_t num_inputs, size_t num_outputs,
std::vector<bool> *save_inputs,
std::vector<bool> *save_outputs);
/*! \brief indicate whether is training. */
#if DMLC_CXX11_THREAD_LOCAL
static thread_local bool is_train_;
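
The AGInfo helpers above follow a small lifecycle around the type-erased nnvm::Node::info field: Create constructs the payload in place, Get fetches it, and Clear drops it only when no gradient is requested. A self-contained sketch of that pattern using std::any as a stand-in for dmlc::any (hypothetical types, illustrative only, not MXNet code):

#include <any>
#include <cassert>

// Stand-in for nnvm::Node: carries a type-erased per-node payload like Node::info.
struct Node {
  std::any info;
};

// Stand-in for the autograd bookkeeping attached to each node.
struct AGInfo {
  int grad_req = 0;  // 0 plays the role of kNullOp here
};

AGInfo& Create(Node* node) {            // cf. AGInfo::Create
  node->info = AGInfo{};
  return *std::any_cast<AGInfo>(&node->info);
}

AGInfo& Get(Node* node) {               // cf. AGInfo::Get
  return *std::any_cast<AGInfo>(&node->info);
}

void Clear(Node* node) {                // cf. AGInfo::Clear
  if (node == nullptr || !node->info.has_value()) return;
  if (Get(node).grad_req != 0) return;  // keep the info while a gradient is requested
  node->info.reset();
}

int main() {
  Node n;
  Create(&n);   // attach autograd info
  Clear(&n);    // grad_req == 0, so the payload is dropped
  assert(!n.info.has_value());
  return 0;
}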
23 changes: 13 additions & 10 deletions include/mxnet/ndarray.h
@@ -80,10 +80,12 @@ class MKLDNNMemory;
* \brief ndarray interface
*/
class NDArray {
friend class AGInfo;
friend class Imperative;
public:
/*! \brief default constructor */
NDArray()
: entry_(nullptr) {
: autograd_(nullptr) {
}
/*!
* \brief constructs a new dynamic NDArray
@@ -98,7 +100,7 @@
shape_(shape),
dtype_(dtype),
storage_type_(kDefaultStorage),
entry_(nullptr) {
autograd_(nullptr) {
}
/*! \brief constructor for NDArray with storage type
*/
@@ -117,7 +119,7 @@
shape_(),
dtype_(dtype),
storage_type_(kDefaultStorage),
entry_(nullptr) {
autograd_(nullptr) {
}
/*!
* \brief constructing a static NDArray that shares data with TBlob
@@ -131,7 +133,7 @@
shape_(data.shape_),
dtype_(data.type_flag_),
storage_type_(kDefaultStorage),
entry_(nullptr) {
autograd_(nullptr) {
}

/*!
@@ -149,7 +151,7 @@
}),
shape_(data.shape_),
dtype_(data.type_flag_), storage_type_(kDefaultStorage),
entry_(nullptr) {
autograd_(nullptr) {
}

/*! \brief create ndarray from shared memory */
@@ -158,7 +160,7 @@
shape_(shape),
dtype_(dtype),
storage_type_(kDefaultStorage),
entry_(nullptr) {
autograd_(nullptr) {
}

/*!
@@ -177,7 +179,7 @@
shape_(shape),
dtype_(data.type_flag_),
storage_type_(stype),
entry_(nullptr) {
autograd_(nullptr) {
}
/*!
* \brief initialize the NDArray, assuming it is not assigned a meaningful shape before
@@ -387,6 +389,7 @@
}
/*! \return the associated variable of the ndarray.*/
inline Engine::VarHandle var() const {
CHECK(ptr_);
return ptr_->var;
}
/*! \return byte offset in chunk of the ndarray*/
@@ -395,6 +398,7 @@
}
/*! \brief return var version of the NDArray*/
inline size_t version() const {
CHECK(var());
return var()->version();
}
/*!
@@ -649,7 +653,7 @@
*/
NDArray Detach() const {
NDArray ret(*this);
ret.entry_ = nnvm::NodeEntry(nullptr);
ret.autograd_ = nnvm::NodeEntry(nullptr);
return ret;
}

@@ -812,7 +816,6 @@
std::vector<std::string>* keys);

private:
friend class Imperative;
/*! \brief the real data chunk that backs NDArray */
// shandle is used to store the actual values in the NDArray
// aux_handles store the aux data(such as indices) if it's needed by non-default storage.
@@ -1102,7 +1105,7 @@
/*! \brief storage type of data */
NDArrayStorageType storage_type_ = kUndefinedStorage;
/*! \brief node entry for autograd */
nnvm::NodeEntry entry_;
nnvm::NodeEntry autograd_;
/*!
* \brief internal TBlob
* \note When user access tblob_ by some const methods like
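
The new CHECK(ptr_) and CHECK(var()) guards turn what was a silent null dereference on a default-constructed NDArray into an explicit failure. A stand-alone analogue with hypothetical types (assert standing in for CHECK), illustrative only:

#include <cassert>
#include <cstddef>
#include <memory>

struct Var { std::size_t version = 0; };
struct Chunk { Var var; };               // stand-in for the NDArray data chunk

struct Array {
  std::shared_ptr<Chunk> ptr_;           // null when default-constructed
  Var* var() const {
    assert(ptr_ && "var() called on an empty Array");  // mirrors CHECK(ptr_)
    return &ptr_->var;
  }
  std::size_t version() const {
    assert(var() != nullptr);            // mirrors CHECK(var())
    return var()->version;
  }
};

int main() {
  Array a;
  a.ptr_ = std::make_shared<Chunk>();
  return static_cast<int>(a.version());  // 0; without ptr_ set, the assert fires first
}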
3 changes: 2 additions & 1 deletion include/mxnet/op_attr_types.h
@@ -54,7 +54,8 @@ enum OpReqType {
*/
kWriteInplace,
/*! \brief add to the provided space */
kAddTo
kAddTo,
kOpReqTypeMax
};

/*!
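
The trailing kOpReqTypeMax sentinel makes it possible to range-check a grad_req value arriving over the C API without hard-coding the last real enumerator (see the later commit "CR comments, validate grad_req is in range"). A small, hypothetical illustration of that check, mirroring the enumerator order shown above:

#include <cstdio>

// Mirrors the OpReqType ordering from the hunk above (illustration only).
enum OpReqType { kNullOp, kWriteTo, kWriteInplace, kAddTo, kOpReqTypeMax };

// A request read from untrusted input is valid iff it falls before the sentinel.
bool IsValidReq(int req) {
  return req >= kNullOp && req < kOpReqTypeMax;
}

int main() {
  std::printf("%d %d\n", IsValidReq(kAddTo), IsValidReq(42));  // prints "1 0"
  return 0;
}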
2 changes: 1 addition & 1 deletion src/c_api/c_api_ndarray.cc
@@ -355,7 +355,7 @@ int MXAutogradBackwardEx(mx_uint num_output,
}

auto grads = Imperative::Get()->Backward(outputs, ograds, variables, is_train,
retain_graph, create_graph);
retain_graph, create_graph);
if (num_variables != 0) {
ret->ret_handles.clear();
ret->out_types.clear();
1 change: 1 addition & 0 deletions src/engine/stream_manager.h
@@ -102,6 +102,7 @@ RunContext StreamManager<kNumGpus, kStreams>::GetRunContext(
#endif // MXNET_USE_CUDA
default:
LOG(FATAL) << "Not Reached";
break;
}
}
return ret;
1 change: 1 addition & 0 deletions src/executor/exec_pass.h
@@ -258,6 +258,7 @@ inline Graph MXGradient(
if (copy_op_str != std::string()) {
graph.attrs["copy_op"] = std::make_shared<any>(std::move(copy_op_str));
}
/// @sa nnvm::pass::Gradient in gradient.cc
return ApplyPass(std::move(graph), "MXGradient");
}
} // namespace pass