Skip to content
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
50 changes: 40 additions & 10 deletions lib/nn.js
Original file line number Diff line number Diff line change
@@ -1,20 +1,20 @@
// Other techniques for learning

/**
 * Pairs an activation function with its derivative so the network can
 * apply it in the forward pass and differentiate it during backprop.
 */
class ActivationFunction {
  /**
   * @param {function(number): number} func - activation y = f(x).
   * @param {function(number): number} dfunc - derivative expressed in
   *   terms of the activation OUTPUT y (not the input x).
   */
  constructor(func, dfunc) {
    this.func = func;
    this.dfunc = dfunc;
  }
}

// Logistic sigmoid; derivative written as y * (1 - y) since backprop
// only has the already-activated output available.
let sigmoid = new ActivationFunction(
  x => 1 / (1 + Math.exp(-x)),
  y => y * (1 - y)
);

// Hyperbolic tangent; derivative 1 - y^2, again in terms of the output.
let tanh = new ActivationFunction(
  x => Math.tanh(x),
  y => 1 - (y * y)
);


Expand All @@ -33,10 +33,15 @@ class NeuralNetwork {
this.bias_o = new Matrix(this.output_nodes, 1);
this.bias_h.randomize();
this.bias_o.randomize();
this.setLearningRate();

this.setActivationFunction();
this.learningRateDecay = false;
this.decayRatio = 0;
this.numberOfTrainings = 0;
this.countOfTrainings = 0;


this.setLearningRate();
this.setActivationFunction();
}

predict(input_array) {
Expand All @@ -57,12 +62,35 @@ class NeuralNetwork {
return output.toArray();
}

setActivationFunction(func = sigmoid) {
this.activation_function = func;
}

setLearningRate(learning_rate = 0.1) {
this.learning_rate = learning_rate;
}

setActivationFunction(func = sigmoid) {
this.activation_function = func;
enableLearningRateDecay(number_of_trainings = 1000, decay_ratio = 0.01) {
this.learningRateDecay = true;
this.numberOfTrainings = number_of_trainings;
this.decayRatio = decay_ratio;
}

disableLearningRateDecay() {
this.learningRateDecay = false;
}

checkLearningRateDecay() {
if (this.countOfTrainings > 0 && this.numberOfTrainings > 0 && this.learningRateDecay && this.learning_rate > 0) {
if (this.countOfTrainings % this.numberOfTrainings === 0) {
let newLearningRate = this.learning_rate * (1 - this.decayRatio);
if (newLearningRate <= 0) {
disableLearningRateDecay();
return;
}
this.setLearningRate(newLearningRate);
}
}
}

train(input_array, target_array) {
Expand Down Expand Up @@ -118,6 +146,9 @@ class NeuralNetwork {
// Adjust the bias by its deltas (which is just the gradients)
this.bias_h.add(hidden_gradient);

this.countOfTrainings++;
this.checkLearningRateDecay();

// outputs.print();
// targets.print();
// error.print();
Expand All @@ -128,8 +159,7 @@ class NeuralNetwork {
}

static deserialize(data) {
if(typeof data == 'string')
{
if (typeof data == 'string') {
data = JSON.parse(data);
}
let nn = new NeuralNetwork(data.input_nodes, data.hidden_nodes, data.output_nodes);
Expand Down