lib/dentaku/tokenizer.rb in dentaku-0.2.1 vs lib/dentaku/tokenizer.rb in dentaku-0.2.2
- old
+ new
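The substance of this diff is where case normalization happens: 0.2.1 downcased the whole input inside `tokenize`, which also lowercased quoted string literals, while 0.2.2 leaves the input alone and has the combinator and identifier scanners downcase their own matches. A minimal sketch of the difference at the tokenizer level (the `require` path and the `Token#value` accessor are assumptions, not shown in this diff):

```ruby
require 'dentaku'

# Tokenize an expression containing a capitalized identifier and a
# capitalized string literal, then look at the converted values.
tokens = Dentaku::Tokenizer.new.tokenize('Fruit = "Apple"')
tokens.reject { |t| t.category == :whitespace }.map(&:value)
# 0.2.1: [:fruit, :eq, "apple"]  -- the whole input was downcased first
# 0.2.2: [:fruit, :eq, "Apple"]  -- the string literal keeps its case;
#                                   the identifier scanner downcases to :fruit itself
```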
@@ -5,12 +5,12 @@
module Dentaku
  class Tokenizer
    SCANNERS = [
      TokenScanner.new(:whitespace, '\s+'),
      TokenScanner.new(:numeric, '(\d+(\.\d+)?|\.\d+)', lambda{|raw| raw =~ /\./ ? raw.to_f : raw.to_i }),
-     TokenScanner.new(:string, '"[^"]*"', lambda{|raw| raw.gsub(/^"|"$/, '') }),
-     TokenScanner.new(:string, "'[^']*'", lambda{|raw| raw.gsub(/^'|'$/, '') }),
+     TokenScanner.new(:string, '"[^"]*"', lambda{|raw| raw.gsub(/^"|"$/, '') }),
+     TokenScanner.new(:string, "'[^']*'", lambda{|raw| raw.gsub(/^'|'$/, '') }),
      TokenScanner.new(:operator, '\+|-|\*|\/', lambda do |raw|
        case raw
        when '+' then :add
        when '-' then :subtract
        when '*' then :multiply
@@ -33,21 +33,21 @@
        when '<' then :lt
        when '>' then :gt
        when '=' then :eq
        end
      end),
-     TokenScanner.new(:combinator, '(and|or)\b', lambda {|raw| raw.strip.to_sym }),
+     TokenScanner.new(:combinator, '(and|or)\b', lambda {|raw| raw.strip.downcase.to_sym }),
      TokenScanner.new(:function, '(if|round)\b', lambda {|raw| raw.strip.to_sym }),
-     TokenScanner.new(:identifier, '[A-Za-z_]+', lambda {|raw| raw.to_sym })
+     TokenScanner.new(:identifier, '[a-z_]+', lambda {|raw| raw.downcase.to_sym })
    ]
    LPAREN = TokenMatcher.new(:grouping, :open)
    RPAREN = TokenMatcher.new(:grouping, :close)
    def tokenize(string)
      nesting = 0
      tokens = []
-     input = string.dup.downcase
+     input = string.dup
      until input.empty?
        raise "parse error at: '#{ input }'" unless SCANNERS.any? do |scanner|
          if token = scanner.scan(input)
            raise "unexpected zero-width match (:#{ token.category }) at '#{ input }'" if token.length == 0