spec/functional/tokenizer_spec.rb in picky-4.6.3 vs spec/functional/tokenizer_spec.rb in picky-4.6.4

- old
+ new

@@ -3,10 +3,10 @@ require 'spec_helper'

 describe Picky::Tokenizer do
   describe 'examples' do
     it 'works correctly' do
-      tokenizer = described_class.new(split_words_on: /\&/, normalizes_words: [[/\&/, 'and']])
+      tokenizer = described_class.new(normalizes_words: [[/\&/, 'and']])
       # Is this really correct? Shouldn't we split after normalizing?
       #
       # Yes – we split using more information.
       #
\ No newline at end of file