lib/meilisearch-rails.rb in meilisearch-rails-0.7.0 vs lib/meilisearch-rails.rb in meilisearch-rails-0.7.1

- old
+ new

@@ -231,13 +231,17 @@
   # are correctly logged or thrown depending on the `raise_on_failure` option
   class SafeIndex
     def initialize(index_uid, raise_on_failure, options)
       client = MeiliSearch::Rails.client
       primary_key = options[:primary_key] || MeiliSearch::Rails::IndexSettings::DEFAULT_PRIMARY_KEY
-      client.create_index(index_uid, { primaryKey: primary_key })
-      @index = client.index(index_uid)
       @raise_on_failure = raise_on_failure.nil? || raise_on_failure
+
+      SafeIndex.log_or_throw(nil, @raise_on_failure) do
+        client.create_index(index_uid, { primary_key: primary_key })
+      end
+
+      @index = client.index(index_uid)
     end

     ::MeiliSearch::Index.instance_methods(false).each do |m|
       define_method(m) do |*args, &block|
         if m == :update_settings
@@ -271,11 +275,11 @@
         end
       end

     def self.log_or_throw(method, raise_on_failure, &block)
       yield
-    rescue ::MeiliSearch::ApiError => e
+    rescue ::MeiliSearch::TimeoutError, ::MeiliSearch::ApiError => e
       raise e if raise_on_failure

       # log the error
       (::Rails.logger || Logger.new($stdout)).error("[meilisearch-rails] #{e.message}")
       # return something
@@ -607,20 +611,34 @@
         # Returns raw json hits as follows:
         # {"hits"=>[{"id"=>"13", "href"=>"apple", "name"=>"iphone"}], "offset"=>0, "limit"=>|| 20, "estimatedTotalHits"=>1,
         # "processingTimeMs"=>0, "query"=>"iphone"}
         json = ms_raw_search(query, params)

-        # Returns the ids of the hits: 13
-        hit_ids = json['hits'].map { |hit| hit[ms_pk(meilisearch_options).to_s] }
-
         # condition_key gets the primary key of the document; looks for "id" on the options
         condition_key = if defined?(::Mongoid::Document) && include?(::Mongoid::Document)
                           ms_primary_key_method.in
                         else
                           ms_primary_key_method
                         end

+        # The condition_key must be a valid column otherwise, the `.where` below will not work
+        # Since we provide a way to customize the primary_key value, `ms_pk(meilisearch_options)` may not
+        # respond with a valid database column. The blocks below prevent that from happening.
+        has_virtual_column_as_pk = if defined?(::Sequel::Model) && self < Sequel::Model
+                                     meilisearch_options[:type].columns.map(&:to_s).exclude?(condition_key.to_s)
+                                   else
+                                     meilisearch_options[:type].columns.map(&:name).map(&:to_s).exclude?(condition_key.to_s)
+                                   end
+
+        condition_key = meilisearch_options[:type].primary_key if has_virtual_column_as_pk
+
+        hit_ids = if has_virtual_column_as_pk
+                    json['hits'].map { |hit| hit[condition_key] }
+                  else
+                    json['hits'].map { |hit| hit[ms_pk(meilisearch_options).to_s] }
+                  end
+
         # meilisearch_options[:type] refers to the Model name (e.g. Product)
         # results_by_id creates a hash with the primaryKey of the document (id) as the key and doc itself as the value
         # {"13"=>#<Product id: 13, name: "iphone", href: "apple", tags: nil, type: nil,
         # description: "Puts even more features at your fingertips", release_date: nil>}
         results_by_id = meilisearch_options[:type].where(condition_key => hit_ids).index_by do |hit|
@@ -825,10 +843,11 @@
         raise ArgumentError, "Unknown constraint type: #{constraint} (#{constraint.class})"
       end

       def ms_find_in_batches(batch_size, &block)
         if (defined?(::ActiveRecord) && ancestors.include?(::ActiveRecord::Base)) || respond_to?(:find_in_batches)
-          find_in_batches(batch_size: batch_size, &block)
+          scope = respond_to?(:meilisearch_import) ? meilisearch_import : all
+          scope.find_in_batches(batch_size: batch_size, &block)
         elsif defined?(::Sequel::Model) && self < Sequel::Model
           dataset.extension(:pagination).each_page(batch_size, &block)
         else
           # don't worry, mongoid has its own underlying cursor/streaming mechanism
           items = []
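
The first two hunks route index creation through SafeIndex.log_or_throw (note the request payload key also changes from primaryKey to primary_key) and add ::MeiliSearch::TimeoutError to the errors that log_or_throw is allowed to downgrade to a log entry. The sketch below shows only that wrapper pattern in plain Ruby; FakeTimeoutError and the plain Logger stand in for the real Meilisearch client errors, so none of these names come from the gem.

require 'logger'

# Stand-in for the client-side errors rescued in the diff
# (::MeiliSearch::TimeoutError and ::MeiliSearch::ApiError).
class FakeTimeoutError < StandardError; end

# Same shape as SafeIndex.log_or_throw: run the block and either
# re-raise or log, depending on raise_on_failure.
def log_or_throw(raise_on_failure)
  yield
rescue FakeTimeoutError => e
  raise e if raise_on_failure

  Logger.new($stdout).error("[meilisearch-rails] #{e.message}")
  nil
end

# raise_on_failure = false: the failure is logged and nil is returned,
# so a failed create_index-style call does not abort model loading.
log_or_throw(false) { raise FakeTimeoutError, 'request timed out' }

# raise_on_failure = true: the error propagates to the caller.
begin
  log_or_throw(true) { raise FakeTimeoutError, 'request timed out' }
rescue FakeTimeoutError => e
  puts "re-raised: #{e.message}"
end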
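
The third hunk guards the `.where(condition_key => hit_ids)` lookup against a customized Meilisearch primary key that is not an actual database column: when the configured key is virtual, the model's own primary key is used both for the WHERE clause and for reading ids out of the hits. The plain-Ruby sketch below mirrors that decision; the column list, the ms_id key and the hit document are made-up stand-ins for what the real code reads from meilisearch_options[:type] and ms_pk.

# Hypothetical stand-ins for what the real code derives from the model:
db_columns    = %w[id name href]  # columns that actually exist in the table
condition_key = 'ms_id'           # customized key sent to Meilisearch
model_pk      = 'id'              # the model's own primary key

# One hit as returned by Meilisearch, carrying both keys.
hits = [{ 'ms_id' => 'product_13', 'id' => '13', 'name' => 'iphone' }]

# Same decision as the diff: a key that is not a real column cannot be
# used in a WHERE clause, so fall back to the database primary key.
has_virtual_column_as_pk = !db_columns.include?(condition_key.to_s)
condition_key = model_pk if has_virtual_column_as_pk

hit_ids = hits.map { |hit| hit[condition_key] }

puts "WHERE #{condition_key} IN (#{hit_ids.join(', ')})"
# => WHERE id IN (13)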
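
The last hunk lets ActiveRecord batching prefer a meilisearch_import scope when the model defines one, instead of always iterating over the default scope. A sketch under the assumption of an in-memory SQLite database (activerecord and sqlite3 gems) is below; the Product model, its columns and the deleted_at filter are illustrative and not part of the gem.

require 'active_record'

ActiveRecord::Base.establish_connection(adapter: 'sqlite3', database: ':memory:')

ActiveRecord::Schema.define do
  create_table :products do |t|
    t.string :name
    t.datetime :deleted_at
  end
end

class Product < ActiveRecord::Base
  # With the ms_find_in_batches change, reindexing batches over this
  # relation instead of Product.all (e.g. to skip soft-deleted rows).
  scope :meilisearch_import, -> { where(deleted_at: nil) }
end

Product.create!(name: 'iphone')
Product.create!(name: 'discontinued', deleted_at: Time.now)

# Same selection logic as the diff's ms_find_in_batches:
scope = Product.respond_to?(:meilisearch_import) ? Product.meilisearch_import : Product.all
scope.find_in_batches(batch_size: 100) { |batch| puts "indexing #{batch.size} record(s)" }
# => indexing 1 record(s)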