lib/dnn/core/optimizers.rb in ruby-dnn-0.16.2 vs lib/dnn/core/optimizers.rb in ruby-dnn-1.0.0
- old
+ new
@@ -7,11 +7,11 @@
# Rebuilds an optimizer instance from a serialized hash (as produced by
# the optimizer's own hash serialization).
#
# @param [Hash | NilClass] hash Serialized optimizer state; +:class+ holds
#   the optimizer class name resolved under the DNN namespace.
# @return [DNN::Optimizers::Optimizer | NilClass] The restored optimizer,
#   or nil when +hash+ is nil.
# @raise [DNNError] If the named class is not a subclass of the receiver.
def self.from_hash(hash)
  return nil unless hash
  optimizer_class = DNN.const_get(hash[:class])
  # allocate skips initialize; state is restored via load_hash below.
  optimizer = optimizer_class.allocate
  # NOTE: DNN_Error was renamed to DNNError in 1.0.0; use the new name.
  raise DNNError, "#{optimizer.class} is not an instance of #{self} class." unless optimizer.is_a?(self)
  optimizer.load_hash(hash)
  optimizer
end
# @param [Float | NilClass] clip_norm Gradient clip norm.
@@ -47,10 +47,9 @@
end
# Rescales every parameter's gradient in place so the global L2 norm of all
# gradients does not exceed @clip_norm. Does nothing when the norm is
# already within the limit.
private def clip_grads(params)
  squared_total = params.reduce(0) { |acc, param| acc + (param.grad**2).sum }
  global_norm = Math.sqrt(squared_total)
  return if global_norm <= @clip_norm
  # Small epsilon guards against division issues when the norm is tiny.
  scale = @clip_norm / (global_norm + 1e-7)
  params.each { |param| param.grad *= scale }
end