Sha256: c4822072c1dcb41c8fae5d25866c0722716e7cebb70c5c7245921c3a3f27e56c

Contents?: true

Size: 1.92 KB

Versions: 27

Compression:

Stored size: 1.92 KB

Contents

# encoding: utf-8
#
require 'spec_helper'

# Specs for the index-side tokenizer. Documents the default configuration:
# no normalizing, no removal of apostrophes, blank tokens rejected, and
# splitting on whitespace.
#
describe Tokenizers::Index do
  
  before(:each) do
    @tokenizer = Tokenizers::Index.new
  end
  
  # Class-level default tokenizer accessor (reader and writer).
  #
  # before/after(:all) save and restore the original default so the writer
  # specs below do not leak a bogus default (:bla / :some_default) into
  # other spec files.
  describe "default*" do
    before(:all) do
      @old = Tokenizers::Index.default
    end
    after(:all) do
      Tokenizers::Index.default = @old
    end
    it "has a reader" do
      lambda { Tokenizers::Index.default }.should_not raise_error
    end
    it "returns by default a new Index" do
      Tokenizers::Index.default.should be_kind_of(Tokenizers::Index)
    end
    it "has a writer" do
      lambda { Tokenizers::Index.default = :bla }.should_not raise_error
    end
    it "returns what has been written, if something has been written" do
      Tokenizers::Index.default = :some_default
      
      Tokenizers::Index.default.should == :some_default
    end
  end
  
  # By default no characters are configured as illegal, so apostrophes
  # survive remove_illegals unchanged.
  describe "remove_illegals" do
    it "should not remove ' from a query by default" do
      @tokenizer.remove_illegals("Lugi's").should == "Lugi's"
    end
  end

  # reject filters out blank tokens from a token array.
  describe "reject" do
    it "should reject tokens if blank" do
      @tokenizer.reject(['', 'not blank', '']).should == ['not blank']
    end
  end
  
  describe "tokenize" do
    # By default tokenize performs no normalization: the input text comes
    # back as a single symbol token.
    describe "normalizing" do
      # Spec-generating helper: one example per (text, expected) pair.
      def self.it_should_normalize_token(text, expected)
        it "should handle the #{text} case" do
          @tokenizer.tokenize(text).to_a.should == [expected].compact
        end
      end
      # defaults
      #
      it_should_normalize_token 'it_should_not_normalize_by_default', :it_should_not_normalize_by_default
    end
    # By default tokenize splits on whitespace and yields symbol tokens.
    describe "tokenizing" do
      # Spec-generating helper: one example per (text, expected) pair.
      def self.it_should_tokenize_token(text, expected)
        it "should handle the #{text} case" do
          @tokenizer.tokenize(text).to_a.should == expected
        end
      end
      # defaults
      #
      it_should_tokenize_token "splitting on \\s", [:splitting, :on, :"\\s"]
      it_should_tokenize_token 'und', [:und]
      it_should_tokenize_token '7',   [:'7']
    end
  end

end

Version data entries

27 entries across 27 versions & 1 rubygems

Version Path
picky-1.4.0 spec/lib/tokenizers/index_spec.rb
picky-1.3.4 spec/lib/tokenizers/index_spec.rb
picky-1.3.3 spec/lib/tokenizers/index_spec.rb
picky-1.3.2 spec/lib/tokenizers/index_spec.rb
picky-1.3.1 spec/lib/tokenizers/index_spec.rb
picky-1.3.0 spec/lib/tokenizers/index_spec.rb
picky-1.2.4 spec/lib/tokenizers/index_spec.rb
picky-1.2.3 spec/lib/tokenizers/index_spec.rb
picky-1.2.2 spec/lib/tokenizers/index_spec.rb
picky-1.2.1 spec/lib/tokenizers/index_spec.rb
picky-1.2.0 spec/lib/tokenizers/index_spec.rb
picky-1.1.7 spec/lib/tokenizers/index_spec.rb
picky-1.1.6 spec/lib/tokenizers/index_spec.rb
picky-1.1.5 spec/lib/tokenizers/index_spec.rb
picky-1.1.4 spec/lib/tokenizers/index_spec.rb
picky-1.1.3 spec/lib/tokenizers/index_spec.rb
picky-1.1.2 spec/lib/tokenizers/index_spec.rb
picky-1.1.1 spec/lib/tokenizers/index_spec.rb
picky-1.1.0 spec/lib/tokenizers/index_spec.rb
picky-1.0.0 spec/lib/tokenizers/index_spec.rb