Sha256: 54cddd83c41b1a00a37e6b73c6cddb8fae34ed2495fb6a9011a4e5c0c1b21a3e

Contents?: true

Size: 1.46 KB

Versions: 2

Compression:

Stored size: 1.46 KB

Contents

# -*- coding: utf-8 -*-

class Perceptron
  # Instance variables:
  #   @synapses - input synapses (weighted inputs)
  #   @bias     - threshold value
  #   @f        - activation function
  #   @do_trace - print a trace line during training
  
  def initialize options={}
    @bias = options[:bias] || 0.0
    @do_trace = options[:trace] || false
    
    # look the activation function up by name
    if options[:activation_function].is_a? Symbol
      @f = ActivationFunctions.method options[:activation_function]
    end
    
    # create the requested number of input synapses
    if options[:synapses].is_a? Integer
      @synapses = Array.new(options[:synapses]) { Synapse.new }
    end
  end
  
  # Activation: weighted sum of the synapse outputs minus the threshold,
  # passed through the activation function.
  def a
    net = -(@bias)
    @synapses.each { |synapse|
      net += synapse.fire
    }
    @f.call net
  end
  
  # Training: present one input vector and the expected output,
  # then adjust weights and threshold if the answer was wrong.
  def train input=[], output=1
    raise "size of training data does not match the number of synapses" if input.size != @synapses.size

    @synapses.each_with_index { |synapse, i|
      synapse.x = input[i]
    }
    a = self.a()
    
    unless a == output
      self.learn output - a
    end
    
    if @do_trace
      # trace output (assumes two inputs)
      puts " #{input[0]} |  #{input[1]} | #{output} || #{a}"
    end
  end
  
  # Learning: apply the perceptron rule with the given error (expected - actual)
  def learn error
    # adjust the weights of the synapses
    @synapses.each { |synapse|
      synapse.backpropagate error
    }
    # adjust the threshold (LEARNRATE is expected to be defined elsewhere in the gem)
    @bias -= LEARNRATE * error
  end
  
  # Recall: present an input vector and return the perceptron's output.
  def recall input=[]
    raise "size of input data does not match the number of synapses" if input.size != @synapses.size
    
    @synapses.each_with_index { |synapse, i|
      synapse.x = input[i]
    }
    self.a()
  end
end
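
The file references Synapse, ActivationFunctions and LEARNRATE without defining them, so the sketch below fills in minimal hypothetical stand-ins for those collaborators and then trains the Perceptron on a logical AND gate. The names Synapse#x=, #fire, #backpropagate and the :step activation only mirror how perceptron.rb calls them; they are illustrative assumptions, not code taken from the gem.

# -- Hypothetical collaborators (not part of this file) -------------------
# Assumes the Perceptron class above is already loaded.

LEARNRATE = 0.1 unless defined?(LEARNRATE) # assumed learning rate

class Synapse
  attr_accessor :x # current input value

  def initialize
    @weight = rand(-0.5..0.5) # small random initial weight
    @x = 0.0
  end

  # weighted contribution of this input to the net activation
  def fire
    @weight * @x
  end

  # perceptron rule: move the weight in the direction of the error
  def backpropagate error
    @weight += LEARNRATE * error * @x
  end
end

module ActivationFunctions
  # Heaviside step function: 1 once the net input reaches the threshold
  def self.step net
    net >= 0 ? 1 : 0
  end
end

# -- Usage sketch ----------------------------------------------------------
perceptron = Perceptron.new synapses: 2,
                            bias: 0.5,
                            activation_function: :step,
                            trace: false # true prints a line per training sample

# Teach the perceptron a logical AND gate.
samples = { [0, 0] => 0, [0, 1] => 0, [1, 0] => 0, [1, 1] => 1 }
20.times do
  samples.each { |input, expected| perceptron.train input, expected }
end

p perceptron.recall([1, 1]) # => 1 once training has converged
p perceptron.recall([0, 1]) # => 0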

Version data entries

2 entries across 2 versions & 1 rubygem

Version                 Path
Santino-neurotic-0.0.0  lib/perceptron.rb
Santino-neurotic-0.0.1  lib/perceptron.rb