-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathBackPropagationLab.cpp
More file actions
132 lines (124 loc) · 3.12 KB
/
BackPropagationLab.cpp
File metadata and controls
132 lines (124 loc) · 3.12 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
/**
Created on Wed Jul 10 21:55:04 2019
@author: TANVIR
**/
#include<iostream>
#include<math.h>
using namespace std;
/**
 * A single sigmoid neuron with two inputs.
 *
 * Members are public on purpose: the surrounding BackPropagation class
 * pokes x1/x2 directly before calling calculate_net()/activation().
 *
 * FIX: the original default constructor left every member uninitialized
 * (indeterminate floats); in-class initializers now zero them.
 */
class Node
{
public:
    float w1{0.0f};  // weight applied to input x1
    float w2{0.0f};  // weight applied to input x2
    float x1{0.0f};  // input 1 (set by the caller before calculate_net)
    float x2{0.0f};  // input 2 (set by the caller before calculate_net)
    float b{0.0f};   // bias term
    float y{0.0f};   // cached result of the last activation() call

    // Construct with explicit weights and bias; inputs start at zero.
    Node(float W1, float W2, float B)
    {
        w1 = W1;
        w2 = W2;
        b = B;
    }

    // Default construction: all members zero (see in-class initializers).
    Node() = default;

    // Weighted input sum: x1*w1 + x2*w2 + b.
    float calculate_net()
    {
        return x1 * w1 + x2 * w2 + b;
    }

    // Logistic sigmoid 1/(1+e^-x); caches the result in y and returns it.
    float activation(float x)
    {
        y = 1 / (1 + exp(-x));
        return y;
    }
};
class BackPropagation{
public:
float lr;
float dset[4][3];
Node nd1;
Node nd2;
Node nd3;
BackPropagation(float w11, float w12, float w21, float w22, float w13, float w23, float b1, float b2, float b3, float LR)
{
nd1 = Node(w11,w21,b1);
nd2 = Node(w12,w22,b2);
nd3 = Node(w13,w23,b3);
lr = LR;
cout<<"Enter training dataset:"<<endl;
int i,j;
for(i=0;i<1;i++){
for(j=0;j<3;j++){
cin>>dset[i][j];
}
}
}
void update(float e)
{
nd3.w1 = nd3.w1 + lr*e*nd3.x1;
nd3.w2 = nd3.w2 + lr*e*nd3.x2;
nd3.b = nd3.b + lr*e;
nd2.w1 = nd2.w1 + lr*e*nd2.x1;
nd2.w2 = nd2.w2 + lr*e*nd2.x2;
nd2.b = nd2.b + lr*e;
nd1.w1 = nd1.w1 + lr*e*nd1.x1;
nd1.w2 = nd1.w2 + lr*e*nd1.x2;
nd1.b = nd1.b + lr*e;
}
void training()
{
cout<<"Model is learning.."<<endl;
int i,itr=100000;
float y,t,e;
while(itr--){
for(i=0;i<1;i++){
nd1.x1 = dset[i][0];
nd1.x2 = dset[i][1];
nd2.x1 = dset[i][0];
nd2.x2 = dset[i][1];
nd3.x1 = nd1.activation(nd1.calculate_net());
nd3.x2 = nd2.activation(nd2.calculate_net());
y = nd3.activation(nd3.calculate_net());
t = dset[i][2];
e = (t - y)*y*(1-y);
if(e == 0){
//cout<<"ok"<<endl;
}
else{
//cout<<"updating"<<endl;
update(e);
//i--;
}
}
}
cout<<"Learning completed!"<<endl;
}
float test(float x1, float x2)
{
float y;
nd1.x1 = x1;
nd1.x2 = x2;
nd2.x1 = x1;
nd2.x2 = x2;
nd3.x1 = nd1.activation(nd1.calculate_net());
nd3.x2 = nd2.activation(nd2.calculate_net());
y = nd3.activation(nd3.calculate_net());
cout<<nd1.w1<<" "<<nd1.w2<<endl;
cout<<nd2.w1<<" "<<nd2.w2<<endl;
cout<<nd3.w1<<" "<<nd3.w2<<endl;
return y;
}
};
int main(void)
{
float x1,x2;
BackPropagation obj = BackPropagation(0.1,0.3,0.2,0.4,0.5,0.6,0.1,0.2,0.3,0.1);
obj.training();
cout<<"Enter test data to test the model:"<<endl;
while(cin>>x1>>x2){
cout<<obj.test(x1,x2)<<endl;
}
return 0;
}