lib/xgb/regressor.rb in xgb-0.1.1 vs lib/xgb/regressor.rb in xgb-0.1.2

- old
+ new

@@ -1,39 +1,20 @@
 module Xgb
-  class Regressor
-    def initialize(max_depth: 3, learning_rate: 0.1, n_estimators: 100, objective: "reg:squarederror", importance_type: "gain")
-      @params = {
-        max_depth: max_depth,
-        objective: objective,
-        learning_rate: learning_rate
-      }
-      @n_estimators = n_estimators
-      @importance_type = importance_type
+  class Regressor < Model
+    def initialize(max_depth: 3, learning_rate: 0.1, n_estimators: 100, objective: "reg:squarederror", importance_type: "gain", **options)
+      super
     end

-    def fit(x, y)
+    def fit(x, y, eval_set: nil, early_stopping_rounds: nil, verbose: true)
       dtrain = DMatrix.new(x, label: y)
-      @booster = Xgb.train(@params, dtrain, num_boost_round: @n_estimators)
-      nil
-    end
+      evals = Array(eval_set).map.with_index { |v, i| [DMatrix.new(v[0], label: v[1]), "validation_#{i}"] }

-    def predict(data)
-      dmat = DMatrix.new(data)
-      @booster.predict(dmat)
-    end
-
-    def save_model(fname)
-      @booster.save_model(fname)
-    end
-
-    def load_model(fname)
-      @booster = Booster.new(params: @params, model_file: fname)
-    end
-
-    def feature_importances
-      score = @booster.score(importance_type: @importance_type)
-      scores = @booster.feature_names.map { |k| score[k] || 0.0 }
-      total = scores.sum.to_f
-      scores.map { |s| s / total }
+      @booster = Xgb.train(@params, dtrain,
+        num_boost_round: @n_estimators,
+        early_stopping_rounds: early_stopping_rounds,
+        verbose_eval: verbose,
+        evals: evals
+      )
+      nil
     end
   end
 end