lib/ai4r/neural_network/backpropagation.rb in ai4r-1.9 vs lib/ai4r/neural_network/backpropagation.rb in ai4r-1.11
- old
+ new
@@ -98,11 +98,11 @@
"lambda { |y| y*(1-y) }, where y=propagation_function(x)",
:learning_rate => "By default 0.25",
:momentum => "By default 0.1. Set this parameter to 0 to disable "+
"momentum."
- attr_accessor :structure, :weights, :activation_nodes
+ attr_accessor :structure, :weights, :activation_nodes, :last_changes
# Creates a new network specifying its architecture.
# E.g.
#
# net = Backpropagation.new([4, 3, 2]) # 4 inputs
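For reference, a minimal usage sketch of the constructor documented above. The train/eval calls are Backpropagation's public API in these versions; the XOR data and the epoch count are illustrative assumptions, not taken from the diff:

    require 'ai4r'

    # 2 inputs, one hidden layer of 3 neurons, 1 output
    net = Ai4r::NeuralNetwork::Backpropagation.new([2, 3, 1])

    # Train on the XOR truth table (illustrative data);
    # train returns the error for each example.
    2000.times do
      net.train([0, 0], [0])
      net.train([0, 1], [1])
      net.train([1, 0], [1])
      net.train([1, 1], [0])
    end

    net.eval([1, 0])  # => output close to [1] once trained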
@@ -156,11 +156,44 @@
init_activation_nodes
init_weights
init_last_changes
return self
end
-
+
protected
+
+ # Custom serialization. Marshal used to fail on this class because it
+ # holds lambda functions internally, and lambdas cannot be serialized.
+ # Now it does not fail, but if you customize the values of
+ # * initial_weight_function
+ # * propagation_function
+ # * derivative_propagation_function
+ # you must restore their values manually after loading the instance.
+ def marshal_dump
+ [
+ @structure,
+ @disable_bias,
+ @learning_rate,
+ @momentum,
+ @weights,
+ @last_changes,
+ @activation_nodes
+ ]
+ end
+
+ def marshal_load(ary)
+ @structure,
+ @disable_bias,
+ @learning_rate,
+ @momentum,
+ @weights,
+ @last_changes,
+ @activation_nodes = ary
+ @initial_weight_function = lambda { |n, i, j| ((rand 2000)/1000.0) - 1}
+ @propagation_function = lambda { |x| 1/(1+Math.exp(-1*(x))) } #lambda { |x| Math.tanh(x) }
+ @derivative_propagation_function = lambda { |y| y*(1-y) } #lambda { |y| 1.0 - y**2 }
+ end
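The practical effect of the new marshal_dump/marshal_load pair is that a trained network can now be persisted with Marshal. A sketch of the round-trip; the file name and the tanh customization are illustrative assumptions (the attribute writers come from ai4r's Parameterizable module, which backs the parameters_info declaration above):

    require 'ai4r'

    net = Ai4r::NeuralNetwork::Backpropagation.new([4, 3, 2])
    # ... train net here ...

    File.open('net.dump', 'wb') { |f| Marshal.dump(net, f) }
    restored = File.open('net.dump', 'rb') { |f| Marshal.load(f) }

    # marshal_load reinstalls the *default* lambdas, so any customized
    # functions must be reassigned after loading, e.g.:
    restored.propagation_function = lambda { |x| Math.tanh(x) }
    restored.derivative_propagation_function = lambda { |y| 1.0 - y**2 }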
+
# Propagate error backwards
def backpropagate(expected_output_values)
check_output_dimension(expected_output_values.length)
calculate_output_deltas(expected_output_values)
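A closing note on the restored defaults: for the sigmoid s(x) = 1/(1 + e^(-x)), the derivative can be written in terms of the output itself, s'(x) = s(x) * (1 - s(x)); with y = s(x) this is exactly the y*(1-y) lambda above. The commented-out tanh pair follows the same pattern, since tanh'(x) = 1 - tanh(x)**2. This is why derivative_propagation_function receives the node's activation y rather than the raw input x, and why backpropagate can compute its deltas from the stored activation_nodes alone.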