Skip to content
This repository was archived by the owner on Jul 16, 2021. It is now read-only.
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
38 commits
Select commit Hold shift + click to select a range
eb984fe
Implemented RMSProp
NivenT Sep 3, 2016
5106ed6
Made a few RMSProp comments more readable
NivenT Sep 3, 2016
24f9d2b
Removed clone
NivenT Sep 3, 2016
1724595
Merge branch 'master' of https://github.com/AtheMathmo/rusty-machine
NivenT Sep 4, 2016
1e15d5a
Created NetLayer trait
NivenT Sep 8, 2016
d139999
Added hyperbolic tangent activation function
NivenT Sep 8, 2016
ff99831
Update neural net implementation
NivenT Sep 9, 2016
8cbe742
Combined Bias and Linear layers
NivenT Sep 9, 2016
e19c427
Replace Linear::{new, default} with Linear::with{out}_bias
NivenT Sep 9, 2016
40dcdcc
Expanded nnet module
NivenT Sep 21, 2016
f7fe44d
Fixed merge conflicts
NivenT Sep 21, 2016
3588dc4
Removed apply
NivenT Sep 22, 2016
ac7d31a
Removed clone from compute_grad
NivenT Sep 22, 2016
db35b1a
Added add_layers example
NivenT Sep 22, 2016
d96e99e
Improved network performance
NivenT Sep 24, 2016
59b280b
Fixed doc test error
NivenT Sep 25, 2016
38bd334
Backprop uses regularization
NivenT Sep 26, 2016
7d1a1fb
Fixed merge conflicts
NivenT Sep 27, 2016
90e6a8c
Added error handling to neural nets
NivenT Oct 4, 2016
9d05629
Improve neural network performance
NivenT Oct 8, 2016
d555bef
removed ActFunc from Criterion
NivenT Oct 8, 2016
999524e
Replace select_cols with simpler sub_slice.into plus some reformatting
tafia Oct 8, 2016
05a0015
Merge pull request #1 from tafia/pr126_modif
NivenT Oct 9, 2016
e7e2a8c
Merge branch 'master' of https://github.com/AtheMathmo/rusty-machine
NivenT Oct 9, 2016
af17995
extend_from_slice instead of append
tafia Oct 9, 2016
7851cb2
Merge pull request #2 from tafia/pr126_modif
NivenT Oct 10, 2016
c4b31c7
Merge branch 'master' of https://github.com/NivenT/rusty-machine
NivenT Oct 10, 2016
4c7ed37
bias appended to beginning of inputs
NivenT Oct 14, 2016
1fc610e
Merge branch 'master' of https://github.com/AtheMathmo/rusty-machine
NivenT Oct 26, 2016
fcca7b7
Compute regularization cost/grad in one step
NivenT Oct 26, 2016
66f53fe
Removed useless 'a and &
NivenT Nov 8, 2016
ccef5d3
Removed useless `Debug` derives
NivenT Jan 6, 2017
c236101
back_* functions now take the layer's output as an input
NivenT Jan 6, 2017
775f7eb
Fixed typo
NivenT Jan 22, 2017
4a2e568
Merge branch 'master' into NivenT-master
AtheMathmo Feb 21, 2017
95a74c5
Minor cleanup and removing a matrix clone
AtheMathmo Feb 21, 2017
165a14a
Merge branch 'master' of https://github.com/NivenT/rusty-machine
NivenT Feb 22, 2017
34ea5d3
Small changes
NivenT Feb 25, 2017
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 3 additions & 2 deletions benches/examples/nnet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use std::vec::Vec;

use rusty_machine::learning::nnet::{NeuralNet, BCECriterion};
use rusty_machine::learning::toolkit::regularization::Regularization;
use rusty_machine::learning::toolkit::activ_fn::Sigmoid;
use rusty_machine::learning::optim::grad_desc::StochasticGD;

use rusty_machine::linalg::Matrix;
Expand Down Expand Up @@ -51,7 +52,7 @@ fn nnet_and_gate_train(b: &mut Bencher) {
let criterion = BCECriterion::new(Regularization::L2(0.));

b.iter(|| {
let mut model = black_box(NeuralNet::new(layers, criterion, StochasticGD::default()));
let mut model = black_box(NeuralNet::mlp(layers, criterion, StochasticGD::default(), Sigmoid));
let _ = black_box(model.train(&inputs, &targets).unwrap());
})
}
Expand All @@ -62,7 +63,7 @@ fn nnet_and_gate_predict(b: &mut Bencher) {
let layers = &[2, 1];
let criterion = BCECriterion::new(Regularization::L2(0.));

let mut model = NeuralNet::new(layers, criterion, StochasticGD::default());
let mut model = NeuralNet::mlp(layers, criterion, StochasticGD::default(), Sigmoid);
let _ = model.train(&inputs, &targets);

b.iter(|| {
Expand Down
5 changes: 4 additions & 1 deletion examples/nnet-and_gate.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ use std::vec::Vec;

use rusty_machine::learning::nnet::{NeuralNet, BCECriterion};
use rusty_machine::learning::toolkit::regularization::Regularization;
use rusty_machine::learning::toolkit::activ_fn::Sigmoid;
use rusty_machine::learning::optim::grad_desc::StochasticGD;

use rusty_machine::linalg::Matrix;
Expand Down Expand Up @@ -41,7 +42,9 @@ fn main() {

let layers = &[2, 1];
let criterion = BCECriterion::new(Regularization::L2(0.));
let mut model = NeuralNet::new(layers, criterion, StochasticGD::default());
// Create a multilayer perceptron with an input layer of size 2 and output layer of size 1
// Uses a Sigmoid activation function and uses Stochastic gradient descent for training
let mut model = NeuralNet::mlp(layers, criterion, StochasticGD::default(), Sigmoid);

println!("Training...");
// Our train function returns a Result<(), E>
Expand Down
Loading