Post by tsh73 on Aug 3, 2023 14:06:16 GMT
Follow-up to this thread:
justbasiccom.proboards.com/thread/1025/help-learning-program
The base source is www.youtube.com/watch?v=n2L1J5JYgUk
"Solved Example Back Propagation Algorithm Multi-Layer Perceptron Network" by Dr. Mahesh Huddar.
I took that video and implemented all the formulas.
The program spits out the same numbers as in the video (apart from the scientific number formatting).
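For reference, here are the formulas implemented, written the way the program computes them (sigmoid activation, learning rate nu=0.9):

a(j) = b(j) + sum over inputs i of w(i,j)*O(i)
O(j) = sigm(a(j)) = 1/(1+exp(-a(j)))
delta(output) = O*(1-O)*(Target-O)
delta(hidden) = O*(1-O)*w(hidden,output)*delta(output)
w(i,j) = w(i,j) + nu*delta(j)*O(i)
b(j) = b(j) + nu*delta(j)

Output of the first two passes: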
========== Pass 1 ==========
forward --------------------
a(4)=-0.7 O(4)=0.33181223
a(5)=0.1 O(5)=0.52497919
a(6)=-0.10453951 O(6)=0.4738889
Output is 0.4738889 Error=0.5261111
backward --------------------
delta(6)=0.13116908
delta(4)=-0.8724562e-2
delta(5)=-0.65420851e-2
dw(1,4)=-0.78521058e-2 w(1,4)=0.19214789
dw(1,5)=-0.58878766e-2 w(1,5)=-0.30588788
dw(2,4)=0 w(2,4)=0.4
dw(2,5)=0 w(2,5)=0.1
dw(3,4)=-0.78521058e-2 w(3,4)=-0.50785211
dw(3,5)=-0.58878766e-2 w(3,5)=0.19411212
dw(4,6)=0.39171154e-1 w(4,6)=-0.26082885
dw(5,6)=0.61974932e-1 w(5,6)=-0.13802507
b(4)=-0.40785211
b(5)=0.19411212
b(6)=0.21805217
========== Pass 2 ==========
forward --------------------
a(4)=-0.72355632 O(4)=0.32661034
a(5)=0.8233637e-1 O(5)=0.52057247
a(6)=0.61010722e-1 O(6)=0.51524795
Output is 0.51524795 Error=0.48475205
If you set the number of passes high, the output approaches the target value (1).
Now, will it be of any help?
'back propagation example, by YouTube video
'link https://www.youtube.com/watch?v=n2L1J5JYgUk
'v1: forward propagation
'v2: + backward
'v3: + second pass
nX=3 'number of inputs
nHid=2 '2 hidden neurons, 1 layer, numbered 4,5
nOut=1 '1 output neuron, numbered 6
dim x(nX) 'inputs
dim w(nX+nHid,nX+nHid+nOut) 'weights
dim u(nX+nHid,nX+nHid+nOut) '1 if weight is used, to skip zeroes when printing
dim dw(nX+nHid,nX+nHid+nOut) 'delta of weights
dim b(nX+nHid+nOut) 'bias
dim a(nX+nHid+nOut) 'pre-sigmoid activation (weighted sum plus bias)
dim O(nX+nHid+nOut) 'neuron output
dim delta(nX+nHid+nOut)
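'neuron numbering: 1..3 are the inputs (x() copied into O()),
'4..5 the hidden layer, 6 the output - so one set of arrays covers all units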
[readData]
print "Initializing test data"
restore [teachingData]
for i = 1 to nX
read x:x(i)=x
O(i)=x
print x(i);" ";
next
read Target
print Target
print
print "-----------"
[readNeurons]
print "Initializing neurons"
for i = 1 to nHid+nOut
read idN, numInp
print idN, numInp
for j = 1 to numInp
read idInp, w
w(idInp,idN)=w
u(idInp,idN)=1
print , idInp, w(idInp,idN)
next
read b: b(idN)=b
print ,,b(idN)
next
print "-----------"
numPasses=200 'with 200 passes the output indeed converges to 1
numPasses=2 'two passes, to match the video
for pass= 1 to numPasses
print "========== Pass ";pass;" =========="
[forvPass]
print "forward --------------------"
[hidden] '4,5
[output] '6
for j = 4 to 6
a(j)=b(j)
for i = 1 to 5 'all possible source neurons; unused weights stay 0
a(j)=a(j)+w(i,j)*O(i) 'x1..x3 copied to O1..O3
next
O(j)=sigm(a(j))
print "a(";j;")=";a(j), "O(";j;")=";O(j)
next
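'check against Pass 1: a(4)=b(4)+w(1,4)*1+w(2,4)*0+w(3,4)*1=-0.4+0.2+0-0.5=-0.7
'and O(4)=sigm(-0.7)=0.33181223, same as printed above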
theErr=Target-O(6)
print "Output is ";O(6),"Error=";theErr
if numPasses=pass then exit for 'no backward pass after the final forward pass
[backPass]
print "backward --------------------"
[outputUnit]
i=6
delta(i)=O(i)*(1-O(i))*(Target-O(i))
print "delta(";i;")=";delta(i)
j=6
for i = 4 to 5
delta(i)=O(i)*(1-O(i))*w(i,j)*delta(j)
print "delta(";i;")=";delta(i)
next
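'hidden deltas pull the output delta back through the connecting weight:
'for Pass 1, delta(5)=0.52497919*(1-0.52497919)*(-0.2)*0.13116908=-0.65420851e-2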
nu=0.9
for j=1 to 5
for i = 1 to 6
if u(j,i) then
dw(j,i)=nu*delta(i)*O(j)
print "dw(";j;",";i;")=";dw(j,i),
w(j,i)=w(j,i)+dw(j,i)
print "w(";j;",";i;")=";w(j,i)
end if
next
next
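'each weight moves by (learning rate)*(delta of target)*(output of source):
'for Pass 1, dw(4,6)=0.9*0.13116908*0.33181223=0.39171154e-1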
'bias
for i = 4 to 6
b(i)=b(i)+nu*delta(i)
print "b(";i;")=";b(i)
next
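'bias updates the same way, as a weight on a constant input of 1:
'for Pass 1, b(6)=0.1+0.9*0.13116908=0.21805217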
next 'pass
'-------------------------------------
input "press Enter to exit";dummy$
[teachingData]
'x1 x2 x3 output
data 1, 0, 1, 1
[neuronData]
'idNeuron,
' numInputs*(idInput, Winput),
' bias
data 4,3
data 1, 0.2
data 2, 0.4
data 3, -0.5
data -0.4
data 5,3
data 1, -0.3
data 2, 0.1
data 3, 0.2
data 0.2
data 6,2
data 4, -0.3
data 5, -0.2
data 0.1
function sigm(a)
sigm=1/(1+exp(0-a))
end function
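To see it converge, set numPasses back to 200 near the top: the printed Output creeps up toward the target 1 and the Error shrinks toward 0.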