Neural Netwerk

Algemene discussies en vragen omtrent Liberty BASIC programma's. Stuur zoveel mogelijk delen van listings (snippets) in als je hulp wilt.

Moderators: anthonio, Abcott

Neural Netwerk

Berichtdoor bluatigro » vr jun 01, 2012 11:29 am

dit is een poging tot een kunstmatig Neuraal Netwerk

het doet al wat
het is niet foutloos
Code: Selecteer alles
global lsize$ , layermax , hoog , alpha , beta, w

  '' ---- build the network ----
  '' lsize$ lists one size per layer; Liberty BASIC's word$() is
  '' 1-based, so layer l's size lives in word l + 1.
  lsize$ = "3 3 3 1"
  alpha = 0.01              '' momentum factor
  beta = 0.001              '' learning rate
  hoog = 1                  '' widest layer; strides the flat weight array

  '' count the layers and find the widest one
  '' ( bug fix: the original loop started word$() at index 0, which
  ''   always returns "" , so it never ran and layermax stayed 0 )
  i = 1
  while word$( lsize$ , i ) <> ""
    if val( word$( lsize$ , i ) ) > hoog then
      hoog = val( word$( lsize$ , i ) )
    end if
    i = i + 1
  wend
  layermax = i - 2          '' layers are indexed 0 .. layermax

  '' index( a , b , c ) can reach a + b * hoog + c * hoog ^ 2 with
  '' a <= layermax and b , c <= hoog , so size the flat arrays to that
  '' bound ( bug fix: hoog ^ 2 * layermax was too small and overflowed )
  wmax = layermax + hoog * hoog + hoog * hoog ^ 2
  dim w( wmax ) , pw( wmax )
  dim uit( layermax , hoog ), d( layermax , hoog )

  for i = 0 to wmax
    w( i ) = rnd( 0 )       '' random initial weight
    pw( i ) = 0             '' no previous weight change yet
  next i

'' ---- train on 3-input XOR until every case is close enough ----
maxEpoch = 100000           '' safety cap so a non-converging net still stops
epoch = 0
done = -1
while done = -1 and epoch < maxEpoch
  epoch = epoch + 1
  for a = 0 to 1
    for b = 0 to 1
      for c = 0 to 1
        call bpgt str$( a ) ; " " ; b ; " " ; c , str$( a xor b xor c )
      next c
    next b
  next a
  done = -1
  for a = 0 to 1
    for b = 0 to 1
      for c = 0 to 1
        call ffwd str$( a ) ; " " ; b ; " " ; c
        '' bug fix: a sigmoid output never hits 0 or 1 exactly, and
        '' xor binds looser than <> , so the old exact compare
        '' uit(...) <> a xor b xor c could never succeed; use a
        '' tolerance around the target instead
        if abs( uit( layermax , 1 ) - ( a xor b xor c ) ) > 0.1 then done = 0
      next c
    next b
  next a
wend

'' ---- run the trained net ----
for a = 0 to 1
  for b = 0 to 1
    for c = 0 to 1
      call ffwd str$( a ) ; " " ; b ; " " ; c
      print a ; b ; c , uit( layermax , 1 ) , a xor b xor c
    next c
  next b
next a
end

sub ffwd in$
'' Feed one input vector ( space-separated numbers in in$ ) forward
'' through the net, leaving each activation in uit( layer , neuron ).

  '' copy the input values into layer 0
  '' ( word$() is 1-based, hence the + 1 offsets into lsize$ and in$ )
  for ii = 0 to val( word$( lsize$ , 1 ) )
    uit( 0 , ii ) = val( word$( in$ , ii + 1 ) )
  next ii

  '' propagate layer by layer: each neuron is the sigmoid of the
  '' weighted sum of the previous layer's activations
  for l = 1 to layermax
    '' for each neuron in layer l ( size is word l + 1 )
    for n = 0 to val( word$( lsize$ , l + 1 ) )
      sum = 0
      '' for each neuron k in the previous layer ( size is word l )
      for k = 0 to val( word$( lsize$ , l ) )
        '' bug fix: the source activation is uit( l - 1 , k ) ,
        '' not uit( l - 1 , n )
        sum = sum + uit( l - 1 , k ) * w( index( l , n , k ) )
      next k
      uit( l , n ) = signoid( sum )
    next n
  next l
end sub

function index( a , b , c )
  '' Flatten ( layer a , neuron b , source neuron c ) into a single
  '' position in the shared weight arrays; the global hoog ( widest
  '' layer ) is the stride. Horner form of a + b * hoog + c * hoog ^ 2.
  index = ( ( c * hoog ) + b ) * hoog + a
end function

sub bpgt in$, target$
'' One backpropagation training step: run in$ through the net, then
'' nudge every weight toward target$ ( both space-separated numbers ).
  call ffwd in$

  '' output-layer deltas: sigmoid derivative times output error
  '' ( word$() is 1-based, so layer l's size is word l + 1 )
  for ii = 0 to val( word$( lsize$ , layermax + 1 ) )
    d( layermax , ii ) = uit( layermax , ii ) _
    * ( 1 - uit( layermax , ii ) ) _
    * ( val( word$( target$ , ii + 1 ) ) - uit( layermax , ii ) )
  next ii

  '' hidden-layer deltas, working backwards through the layers
  for ii = layermax - 1 to 0 step -1
    for j = 0 to val( word$( lsize$ , ii + 1 ) )
      sum = 0
      for k = 0 to val( word$( lsize$ , ii + 2 ) )
        '' bug fix: accumulate into sum instead of overwriting it
        sum = sum + d( ii + 1 , k ) * w( index( ii + 1 , k , j ) )
      next k
      d( ii , j ) = uit( ii , j ) * ( 1 - uit( ii , j ) ) * sum
    next j
  next ii

  '' momentum: reapply a fraction of the previous weight change
  '' ( bug fix: alpha is a scalar; the original alpha( index(...) )
  ''   treated it as an array and discarded the current weight )
  for ii = 1 to layermax
    for j = 0 to val( word$( lsize$ , ii + 1 ) )
      for k = 0 to val( word$( lsize$ , ii ) )
        w( index( ii , j , k ) ) = w( index( ii , j , k ) ) _
          + alpha * pw( index( ii , j , k ) )
      next k
    next j
  next ii

  '' gradient step: ADD the new correction to the weight ( the
  '' original assigned w = pw , wiping out all learned weights ) and
  '' remember it in pw( ) for the next momentum pass
  for ii = 1 to layermax
    for j = 0 to val( word$( lsize$ , ii + 1 ) )
      for k = 0 to val( word$( lsize$ , ii ) )
        pw( index( ii , j , k ) ) = beta * d( ii , j ) * uit( ii - 1 , k )
        w( index( ii , j , k ) ) = w( index( ii , j , k ) ) _
          + pw( index( ii , j , k ) )
      next k
    next j
  next ii
end sub

function signoid( x )
  '' Logistic sigmoid activation, rising from 0 to 1 through 0.5 at x = 0.
  '' bug fix: the exponent must be negated — exp( x ) made the curve
  '' FALL from 1 to 0, inverting every activation in the net.
  signoid = 1 / ( 1 + exp( -x ) )
end function

bluatigro
 
Berichten: 306
Geregistreerd: za sep 27, 2008 6:01 pm

Keer terug naar Liberty BASIC Code

Wie is er online

Gebruikers op dit forum: Geen geregistreerde gebruikers. en 1 gast

cron