lib/rubyneat/neuron.rb in rubyneat-0.3.5.alpha.5 vs lib/rubyneat/neuron.rb in rubyneat-0.3.5.alpha.6
- old
+ new
@@ -28,10 +28,16 @@
attr_accessor :output
# List of neuron types defined.
@@neuron_types = []
+ # Type names must always be unique for Neurons.
+ # TODO: Enforce uniqueness in neural type names
+ def self.type_name
+ @type_name ||= self.to_s.split('::').last.split('Neuron').first.downcase
+ end
+
# Is this class of Input type?
def self.input? ; false ; end
def input? ; self.class.input? ; end
def self.bias? ; false ; end
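
For orientation, the new type_name helper derives a lowercase tag by dropping the module path and the trailing "Neuron" from the class name, and memoizes it in @type_name. The standalone sketch below mirrors that string pipeline; the helper name and the NEAT:: prefix are illustrative assumptions, not part of the gem:

    # Mirrors the derivation in self.type_name above: drop the module path,
    # drop the "Neuron" suffix, and lowercase what remains.
    def derive_type_name(class_name)
      class_name.split('::').last.split('Neuron').first.downcase
    end

    derive_type_name('NEAT::LinearNeuron')    # => "linear"
    derive_type_name('NEAT::GaussianNeuron')  # => "gaussian"
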
@@ -55,11 +61,10 @@
raise NeatException.new "express() must be implemented by subclass."
end
end
-
=begin rdoc
= Basic Neuron Types
Basically, the neurons (nodes) will have an instantiation to represent their places in the
neural net, and a way to spin up the phenotypic representation.
@@ -142,9 +147,37 @@
# create a function on the instance with our name
# that sums all inputs and produces a cosine output
def express(instance)
instance.define_singleton_method(@name) {|*inputs|
cos(1.6 * inputs.reduce {|p, q| p + q})
+ }
+ end
+ end
+
+ # Linear function (CPPN) -- simply add up all the inputs.
+ class LinearNeuron < Neuron
+ # create a function on the instance with our name
+ # that simply sums all the inputs.
+ def express(instance)
+ instance.define_singleton_method(@name) {|*inputs|
+ inputs.reduce {|p, q| p + q}
+ }
+ end
+ end
+
+ # Gaussian function (CPPN) -- a Gaussian (SD 1) of the summed inputs.
+ class GaussianNeuron < Neuron
+ # create a function on the instance with our name
+ # that sums all inputs and produces a Gaussian with
+ # a standard deviation of 1.
+ def express(instance)
+ instance.define_singleton_method(@name) { |*inputs|
+ a = 1.0 # height of the peak
+ b = 0.0 # center
+ c = 1.0 # standard deviation
+ d = 0.0 # lowest y value (vertical offset)
+ x = inputs.reduce {|p, q| p + q}
+ a * exp(-(x - b)**2.0 / (2 * c**2.0)) + d
}
end
end
end
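
To make the express contract of the new CPPN neurons concrete: each one defines a singleton method, named after the neuron's @name, directly on the phenotype object it is handed. The stand-in class below (SketchLinearNeuron, a made-up name with a simplified constructor; the real neurons are built through the gem's own base class) copies only the express body of LinearNeuron from this diff. The cosine-based neuron shown earlier follows the same pattern, merely wrapping the sum in cos(1.6 * sum).

    # Hypothetical stand-in: only the express body matches LinearNeuron above;
    # the constructor is simplified so the sketch runs on its own.
    class SketchLinearNeuron
      def initialize(name)
        @name = name
      end

      def express(instance)
        instance.define_singleton_method(@name) { |*inputs|
          inputs.reduce { |p, q| p + q }
        }
      end
    end

    phenotype = Object.new
    SketchLinearNeuron.new(:lin1).express(phenotype)
    phenotype.lin1(0.5, -0.25, 1.0)   # => 1.25 -- the inputs are simply summed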
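
Likewise, with the parameters GaussianNeuron hard-codes (a = 1, b = 0, c = 1, d = 0), its body collapses to exp(-x**2 / 2) of the summed inputs: a unit-height, unit-SD bell curve centered at zero. A quick standalone check follows; the method name is made up, and Math is mixed in only because exp is called unqualified (the expressed phenotype is assumed to provide it).

    include Math   # exposes exp at the top level for this sketch

    # Same formula as GaussianNeuron#express with a = 1, b = 0, c = 1, d = 0.
    def sketch_gaussian(*inputs)
      x = inputs.reduce { |p, q| p + q }
      exp(-(x**2.0) / 2.0)
    end

    sketch_gaussian(0.0)        # => 1.0, the peak at the center
    sketch_gaussian(1.0)        # => ~0.6065, i.e. exp(-0.5), one SD out
    sketch_gaussian(0.5, 0.5)   # => same value, since inputs are summed first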