Skip to content
This repository was archived by the owner on Nov 17, 2023. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions R-package/NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ export(mx.io.arrayiter)
export(mx.io.extract)
export(mx.kv.create)
export(mx.metric.accuracy)
export(mx.metric.custom)
export(mx.metric.rmse)
export(mx.model.FeedForward.create)
export(mx.model.load)
export(mx.model.save)
Expand Down
2 changes: 1 addition & 1 deletion R-package/R/io.R
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ mx.io.arrayiter <- function(data, label,
if (shuffle) {
unif.rnds <- as.array(mx.runif(c(length(label)), ctx=mx.cpu()));
} else {
unif.rnds <- mx.array(0)
unif.rnds <- as.array(0)
}
mx.io.internal.arrayiter(as.array(data),
as.array(label),
Expand Down
18 changes: 14 additions & 4 deletions R-package/R/metric.R
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
# create a customized metric based on feval(label, pred)
#' Helper function to create a customized metric
#'
#' @export
mx.metric.custom <-function(name, feval) {
init <- function() {
c(0, 0)
}
update <- function(label, pred, state) {
m <- feval(label, pred)
m <- feval(as.array(label), as.array(pred))
state <- c(state[[1]] + 1, state[[2]] + m)
return(state)
}
Expand All @@ -20,6 +22,14 @@ mx.metric.custom <-function(name, feval) {
#'
#' @export
#' Accuracy metric for classification
#'
#' Fraction of predictions whose argmax class matches the label.
#' `pred` is expected to be a numeric matrix of class scores (one row per
#' sample, one column per class) and `label` a vector of zero-based class
#' indices — presumably converted via `as.array()` by `mx.metric.custom`'s
#' update step; verify against that helper.
#'
#' @export
mx.metric.accuracy <- mx.metric.custom("accuracy", function(label, pred) {
  # max.col is 1-based, labels are 0-based, hence the +1 shift.
  # Spell out ties.method instead of relying on partial matching of "tie".
  ypred <- max.col(pred, ties.method = "first")
  sum((label + 1) == ypred) / length(label)
})

#' RMSE metric
#'
#' @export
#' RMSE metric
#'
#' Root mean squared error between predictions and labels.
#'
#' @export
mx.metric.rmse <- mx.metric.custom("rmse", function(label, pred) {
  squared.err <- (label - pred)^2
  sqrt(mean(squared.err))
})
2 changes: 1 addition & 1 deletion R-package/R/model.R
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ mx.model.FeedForward.create <-
function(symbol, X, y=NULL, ctx=NULL,
num.round=10, optimizer="sgd",
initializer=mx.init.uniform(0.01),
eval.data=NULL, eval.metric=mx.metric.accuracy,
eval.data=NULL, eval.metric=NULL,
iter.end.callback=NULL, epoch.end.callback=NULL,
array.batch.size=128,
kvstore="local",
Expand Down
12 changes: 12 additions & 0 deletions R-package/man/mx.metric.custom.Rd
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
% Generated by roxygen2 (4.1.1): do not edit by hand
% Please edit documentation in R/metric.R
\name{mx.metric.custom}
\alias{mx.metric.custom}
\title{Helper function to create a customized metric}
\usage{
mx.metric.custom(name, feval)
}
\description{
Helper function to create a customized metric
}

20 changes: 20 additions & 0 deletions R-package/man/mx.metric.rmse.Rd
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
% Generated by roxygen2 (4.1.1): do not edit by hand
% Please edit documentation in R/metric.R
\docType{data}
\name{mx.metric.rmse}
\alias{mx.metric.rmse}
\title{RMSE metric}
\format{\preformatted{List of 3
$ init :function ()
$ update:function (label, pred, state)
$ get :function (state)
- attr(*, "class")= chr "mx.metric"
}}
\usage{
mx.metric.rmse
}
\description{
RMSE metric
}
\keyword{datasets}

6 changes: 3 additions & 3 deletions R-package/man/mx.model.FeedForward.create.Rd
Original file line number Diff line number Diff line change
Expand Up @@ -6,9 +6,9 @@
\usage{
mx.model.FeedForward.create(symbol, X, y = NULL, ctx = NULL,
num.round = 10, optimizer = "sgd", initializer = mx.init.uniform(0.01),
eval.data = NULL, eval.metric = mx.metric.accuracy,
iter.end.callback = NULL, epoch.end.callback = NULL,
array.batch.size = 128, kvstore = "local", ...)
eval.data = NULL, eval.metric = NULL, iter.end.callback = NULL,
epoch.end.callback = NULL, array.batch.size = 128, kvstore = "local",
...)
}
\arguments{
\item{symbol}{The symbolic configuration of the neural network.}
Expand Down
150 changes: 150 additions & 0 deletions R-package/vignettes/fiveMinutesNeuralNetwork.Rmd
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@
Neural Network with MXNet in Five Minutes
=============================================

This is the first tutorial for new users of the R package `mxnet`. You will learn to construct a neural network to do regression in 5 minutes.

We will show you how to do both classification and regression tasks. The data we use comes from the package `mlbench`.

## Classification

First of all, let us load in the data and preprocess it:

```{r}
require(mlbench)
require(mxnet)

data(Sonar, package="mlbench")

Sonar[,61] = as.numeric(Sonar[,61])-1
train.ind = c(1:50, 100:150)
train.x = data.matrix(Sonar[train.ind, 1:60])
train.y = Sonar[train.ind, 61]
test.x = data.matrix(Sonar[-train.ind, 1:60])
test.y = Sonar[-train.ind, 61]
```

The next step is to define the structure of the neural network.

```{r}
# Define the input data
data <- mx.symbol.Variable("data")
# A fully connected hidden layer
# data: input source
# name: fc1
# num_hidden: number of neurons in this hidden layer
fc1 <- mx.symbol.FullyConnected(data, name="fc1", num_hidden=20)

# An activation function
# fc1: input source
# name: tanh1
# act_type: type for the activation function
act1 <- mx.symbol.Activation(fc1, name="tanh1", act_type="tanh")
fc2 <- mx.symbol.FullyConnected(act1, name="fc2", num_hidden=2)

# Softmax function for the output layer
softmax <- mx.symbol.Softmax(fc2, name="sm")
```

According to the comments in the code, you can see the meaning of each function and its arguments. They can be easily modified according to your need.

Before we start to train the model, we can specify where to run our program:

```{r}
device.cpu = mx.cpu()
```

Here we choose to run it on CPU.

After the network configuration, we can start the training process:

```{r}
mx.set.seed(0)
model <- mx.model.FeedForward.create(softmax, X=train.x, y=train.y,
ctx=device.cpu, num.round=20, array.batch.size=15,
learning.rate=0.07, momentum=0.9, eval.metric=mx.metric.accuracy,
epoch.end.callback=mx.callback.log.train.metric(100))
```

Note that `mx.set.seed` is the correct function to control the random process in `mxnet`. You can see the accuracy in each round during training. It is also easy to make predictions and evaluate them:

```{r}
preds = predict(model, test.x)
pred.label = max.col(preds)-1
table(pred.label, test.y)
```

## Regression

Again, let us preprocess the data first.

```{r}
data(BostonHousing, package="mlbench")

train.ind = seq(1, 506, 3)
train.x = data.matrix(BostonHousing[train.ind, -14])
train.y = BostonHousing[train.ind, 14]
test.x = data.matrix(BostonHousing[-train.ind, -14])
test.y = BostonHousing[-train.ind, 14]
```

We can configure a similar network as what we have done above. The only difference is in the output activation:

```{r}
# Define the input data
data <- mx.symbol.Variable("data")
# A fully connected hidden layer
# data: input source
# name: fc1
# num_hidden: number of neurons in this hidden layer
fc1 <- mx.symbol.FullyConnected(data, name="fc1", num_hidden=20)

# An activation function
# fc1: input source
# name: tanh1
# act_type: type for the activation function
act1 <- mx.symbol.Activation(fc1, name="tanh1", act_type="tanh")
fc2 <- mx.symbol.FullyConnected(act1, name="fc2", num_hidden=1)

# Linear regression output for the output layer
lro <- mx.symbol.LinearRegressionOutput(fc2, name="lro")
```

The main change is the last function, which enables the new network to optimize for squared loss. We can now train on this simple data set.

```{r}
mx.set.seed(0)
model <- mx.model.FeedForward.create(lro, X=train.x, y=train.y,
ctx=device.cpu, num.round=5, array.batch.size=10,
learning.rate=0.1, momentum=0.9, eval.metric=mx.metric.rmse,
epoch.end.callback=mx.callback.log.train.metric(100))
```

It is also easy to make predictions and evaluate them:

```{r}
preds = predict(model, test.x)
sqrt(mean((preds-test.y)^2))
```

Currently we have two pre-defined metrics, "accuracy" and "rmse". One might wonder how to customize the evaluation metric. `mxnet` provides an interface for users to define their own metrics of interest:

```{r}
demo.metric.mae <- mx.metric.custom("mae", function(label, pred) {
res <- mean(abs(label-pred))
return(res)
})
```

This is an example for mean absolute error. We can simply plug it in the training function:

```{r}
mx.set.seed(0)
model <- mx.model.FeedForward.create(lro, X=train.x, y=train.y,
ctx=device.cpu, num.round=5, array.batch.size=10,
learning.rate=0.1, momentum=0.9, eval.metric=demo.metric.mae,
epoch.end.callback=mx.callback.log.train.metric(100))
```

Congratulations! Now you have learnt the basics of using `mxnet`.


Loading