lib/dentaku/tokenizer.rb in dentaku-3.3.0 vs lib/dentaku/tokenizer.rb in dentaku-3.3.1

- old
+ new

@@ -10,11 +10,11 @@ RPAREN = TokenMatcher.new(:grouping, :close)

    def tokenize(string, options = {})
      @nesting = 0
      @tokens = []
-     @aliases = options.fetch(:aliases, Dentaku.aliases)
+     @aliases = options.fetch(:aliases, global_aliases)

      input = strip_comments(string.to_s.dup)
      input = replace_aliases(input)
      @case_sensitive = options.fetch(:case_sensitive, false)

      until input.empty?
@@ -81,9 +81,14 @@
      values = @aliases.values.flatten.join('|')
      /(?<=\p{Punct}|[[:space:]]|\A)(#{values})(?=\()/i
    end

    private
+
+   def global_aliases
+     return {} unless Dentaku.respond_to?(:aliases)
+     Dentaku.aliases
+   end

    def fail!(reason, **meta)
      message = case reason
      when :parse_error