lib/qlang/lexer/base.rb: qlang-0.0.27182000 vs qlang-0.0.27182100
- line removed (present only in the old version, 0.0.27182000)
+ line added (present only in the new version, 0.0.27182100)
@@ -5,66 +5,83 @@
module Lexer
class Base
attr_accessor :lexeds
include Tokens
class << self
- attr_reader :token_hash
+ attr_reader :token_rule_hash
def rule(pattern, &token)
token ||= proc { :NULL }
- @token_hash ||= {}
- @token_hash[token.call] = pattern
+ @token_rule_hash ||= {}
+ @token_rule_hash[token.call] = pattern
end
end
def initialize(str)
ss = StringScanner.new(str)
@lexeds = []
until ss.eos?
- self.class.token_hash.each do |token, patter|
- if ss.scan(patter)
- (@lexeds << {token => ss[0]}) unless token == :NULL
- break
- end
+ scan_rslt, ss = scan(ss)
+ if scan_rslt
+ @lexeds << scan_rslt unless scan_rslt == :NULL
+ else
+ fail "I'm so sorry, something wrong. Please feel free to report this."
end
end
end
+ def scan(ss)
+ scan_rslt = nil
+ token_rule_hash.each do |token, patter|
+ if ss.scan(patter)
+ scan_rslt = (token == :NULL) ? :NULL : {token => ss[0], els: [ss[1],ss[2], ss[3]].compact }
+ break
+ end
+ end
+ [scan_rslt, ss]
+ end
+
# Accessor
## GET(without side effect)
+ def [](index)
+ @lexeds[index.to_i]
+ end
+
def get_value(num)
num = num.to_i
- @lexeds.map { |lexed| lexed.values.first }[num]
+ @lexeds[num].values.first
end
def token_str
@lexeds.map.with_index { |lexed, i| ":#{lexed.keys.first}#{i}" }.join
end
+ def token_rule_hash
+ self.class.token_rule_hash
+ end
+
## POST(with side effect, without idempotence.)
def parsed!(parsed, target)
case target
when Range
parsed_between!((target.first.to_i)..(target.last.to_i), parsed)
else
parsed_at!(target.to_i, parsed)
end
end
- def squash!(range, token: :CONT)
+ #squash!(range, token: :CONT)
+ def squash!(range, opts={token: :CONT})
+ token = opts[:token]
range = (range.first.to_i)..(range.last.to_i)
value = values[range].join
range.count.times { @lexeds.delete_at(range.first) }
@lexeds.insert(range.first, { token => value })
end
# Legacy Accessor
def values
@lexeds.map { |lexed| lexed.values.first }
- end
-
- def [](index)
- @lexeds[index]
end
private
def parsed_at!(token_position, parsed)
@lexeds.delete_at(token_position)