Sha256: 2f5e32a5db02f77693bed685ab25ccc74a5b7ff784158b9c668040ca9e7005bd

Contents?: true

Size: 1.32 KB

Versions: 4

Compression:

Stored size: 1.32 KB

Contents

module ActsAsTokenizable
  require 'acts_as_tokenizable/string_extensions'
  
  # Default to_token method. Expects a "name" attribute on the object;
  # override it for more complex token generation.
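  #
  # A hypothetical override, using the String#to_token extension required
  # above:
  #
  #   def to_token
  #     name.to_token
  #   end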
  def to_token
    raise NoMethodError.new("You must redefine to_token in your model. Example: self.name.to_token()")
  end
  
  # Assigns the result of to_token to the configured token field,
  # i.e. self.<token_field_name> = self.to_token.
  def tokenize
    self.send("#{self.class.token_field_name}=", self.to_token)
  end
  
  module ClassMethods
    attr_accessor :token_field_name
    
    # Prepares the search_token before the tokenized_by scope uses it.
    # Override this to remove stop words or replace words; by default it
    # tokenizes each word and removes duplicates.
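    #
    # A hypothetical override that also strips a few stop words before
    # tokenizing (the stop-word list is illustrative only):
    #
    #   def prepare_search_token(search_token)
    #     search_token.gsub(/\b(the|of|and)\b/i, ' ').words_to_token
    #   end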
    def prepare_search_token(search_token)
      search_token.words_to_token
    end
  end
  
  def self.included(base)
    base.class_eval do
      extend ClassMethods
      
      named_scope :tokenized_by, lambda {|search_token|
        search_strings = []
        search_values = []
        prepare_search_token(search_token).words.each do |w|
          search_strings.push("#{token_field_name} LIKE ?")
          search_values.push("%#{w}%")
        end
        {:conditions => [search_strings.join(' AND '), *search_values]}
      }
    end
  end
end
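
For orientation, a minimal usage sketch. It assumes a Rails 2.x-era ActiveRecord model (named_scope predates Rails 3) with a string column to hold the token; the model name Place and the column name search_token are hypothetical, and String#to_token comes from the gem's string_extensions required above.

class Place < ActiveRecord::Base
  include ActsAsTokenizable

  # Hypothetical string column that stores the generated token.
  self.token_field_name = "search_token"

  # Build the token from the record's name (overrides the default to_token).
  def to_token
    name.to_token
  end

  # Keep the token column up to date on every save.
  before_save :tokenize
end

# Returns records whose token contains every word of the search string.
Place.tokenized_by("san francisco")

The scope joins one LIKE condition per word with AND, so a record matches only if its token field contains every word of the prepared search token.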

Version data entries

4 entries across 4 versions & 1 rubygem

Version Path
acts_as_tokenizable-0.3.1 lib/acts_as_tokenizable/acts_as_tokenizable.rb
acts_as_tokenizable-0.3.0 lib/acts_as_tokenizable/acts_as_tokenizable.rb
acts_as_tokenizable-0.2.0 lib/acts_as_tokenizable/acts_as_tokenizable.rb
acts_as_tokenizable-0.1.0 lib/acts_as_tokenizable/acts_as_tokenizable.rb