lib/qlang/lexer/base.rb in qlang-0.0.27182110 vs lib/qlang/lexer/base.rb in qlang-0.0.27182120

- old
+ new

@@ -10,51 +10,56 @@
         attr_reader :token_rule_hash

         def rule(pattern, &token)
           token ||= proc { :NULL }
           @token_rule_hash ||= {}
-          @token_rule_hash[token.call] = pattern
+          @token_rule_hash[pattern] = token.call
         end
       end

       def initialize(str)
         ss = StringScanner.new(str)
         @lexeds = []
         until ss.eos?
           scan_rslt, ss = scan(ss)
           if scan_rslt
-            @lexeds << scan_rslt unless scan_rslt == :NULL
+            @lexeds << scan_rslt unless scan_rslt[:token] == :NULL
           else
             fail "I'm so sorry, something wrong. Please feel free to report this."
           end
         end
       end

       def scan(ss)
         scan_rslt = nil
-        token_rule_hash.each do |token, patter|
-          if ss.scan(patter)
-            scan_rslt = (token == :NULL) ? :NULL : {token => ss[0], els: [ss[1],ss[2], ss[3]].compact }
+        token_rule_hash.each do |pattern, token|
+          if ss.scan(pattern)
+            scan_rslt = {
+              token: token,
+              value: ss[0],
+              els: 4.times.inject([]) { |s,i|s << ss[i+1] }.compact
+            }
             break
           end
         end
         [scan_rslt, ss]
       end

       # Accessor
       ## GET(without side effect)
-      def [](index)
-        @lexeds[index.to_i]
+      def get_value(num)
+        num = num.to_i
+        @lexeds[num][:value]
       end

-      def get_value(num)
+      def get_els(num)
         num = num.to_i
-        @lexeds[num].values.first
+        @lexeds[num][:els]
       end

       def token_str
-        @lexeds.map.with_index { |lexed, i| ":#{lexed.keys.first}#{i}" }.join
+        @lexeds.map.with_index { |lexed, i| ":#{lexed[:token]}#{i}" }.join
       end

       def token_rule_hash
         self.class.token_rule_hash
       end

@@ -73,29 +78,29 @@
       def squash!(range, opts={token: :CONT})
         token = opts[:token]
         range = (range.first.to_i)..(range.last.to_i)
         value = values[range].join
         range.count.times { @lexeds.delete_at(range.first) }
-        @lexeds.insert(range.first, { token => value })
+        @lexeds.insert(range.first, { token: token, value: value })
       end

       # Legacy Accessor
       def values
-        @lexeds.map { |lexed| lexed.values.first }
+        @lexeds.map { |lexed| lexed[:value] }
       end

       private

       def parsed_at!(token_position, parsed)
         @lexeds.delete_at(token_position)
-        @lexeds.insert(token_position, { R: parsed })
+        @lexeds.insert(token_position, { token: :R, value: parsed })
       end

       def parsed_between!(token_range, parsed)
         start_pos = token_range.first
         token_range.count.times do
           @lexeds.delete_at(start_pos)
         end
-        @lexeds.insert(start_pos, { R: parsed })
+        @lexeds.insert(start_pos, { token: :R, value: parsed })
       end
     end
   end
end