lib/qlang/lexer/base.rb in qlang-0.0.27180000 vs lib/qlang/lexer/base.rb in qlang-0.0.27182000
- lines removed (present only in the old version, 0.0.27180000)
+ lines added (present only in the new version, 0.0.27182000)
@@ -1,9 +1,8 @@
 require 'strscan'
 require 'qlang/lexer/tokens'
-
 module Qlang
   module Lexer
     class Base
       attr_accessor :lexeds
       include Tokens
@@ -28,90 +27,59 @@
             end
           end
         end
       end
-      def get_value(token_with_num)
-        num = to_num(token_with_num)
-        values[num]
+      # Accessor
+      ## GET(without side effect)
+      def get_value(num)
+        num = num.to_i
+        @lexeds.map { |lexed| lexed.values.first }[num]
       end
-      def squash_with_prn(token_with_num, value)
-        num = to_num(token_with_num)
-        3.times do
-          @lexeds.delete_at(num - 1)
-        end
-        @lexeds.insert(num - 1, {R: ":%|#{value}|%:"})
+      def token_str
+        @lexeds.map.with_index { |lexed, i| ":#{lexed.keys.first}#{i}" }.join
       end
-      def squash_to_cont(token_with_num, count)
-        num = to_num(token_with_num)
-        value = ''
-        count.times do
-          value += values[num]
-          @lexeds.delete_at(num)
+      ## POST(with side effect, without idempotence.)
+      def parsed!(parsed, target)
+        case target
+        when Range
+          parsed_between!((target.first.to_i)..(target.last.to_i), parsed)
+        else
+          parsed_at!(target.to_i, parsed)
         end
-        @lexeds.insert(num, {CONT: value})
       end
-      def ch_token(token_with_num, token)
-        num = to_num(token_with_num)
-        before_hash = @lexeds.delete_at(num)
-        @lexeds.insert(num, {token => before_hash.values.first})
+      def squash!(range, token: :CONT)
+        range = (range.first.to_i)..(range.last.to_i)
+        value = values[range].join
+        range.count.times { @lexeds.delete_at(range.first) }
+        @lexeds.insert(range.first, { token => value })
       end
-      def tokens
-        @lexeds.map { |lexed| lexed.keys.first }
-      end
-
-      def token_with_nums
-        @lexeds.map.with_index { |lexed, i| ":#{lexed.keys.first}#{i}" }
-      end
-
-      def ch_value(token_with_num, value)
-        num = to_num(token_with_num)
-        before_hash = @lexeds.delete_at(num)
-        @lexeds.insert(num, {before_hash.keys.first => value })
-      end
-
+      # Legacy Accessor
       def values
        @lexeds.map { |lexed| lexed.values.first }
       end
-      def token_str
-        token_with_nums.join
-      end
-
       def [](index)
        @lexeds[index]
       end
-      def split(separator)
-        values.chunk { |e| e == separator }.reject { |sep, _| sep }.map { |_, ans| ans }
-      end
+      private
+        def parsed_at!(token_position, parsed)
+          @lexeds.delete_at(token_position)
+          @lexeds.insert(token_position, { R: parsed })
+        end
-      def fix_r_txt!
-        @lexeds.map! do |hash|
-          if value = (hash[:R] || hash[:CONT])
-            ary = hash.first
-            ary[1] = value.gsub(/:%\|/,'').gsub(/\|%:/,'')
-            hash = Hash[*ary]
+        def parsed_between!(token_range, parsed)
+          start_pos = token_range.first
+          token_range.count.times do
+            @lexeds.delete_at(start_pos)
           end
-          hash
+          @lexeds.insert(start_pos, { R: parsed })
         end
-      end
-      # NEW APIs
-      def parsed!(token_position, parsed)
-        @lexeds.delete_at(token_position)
-        @lexeds.insert(token_position, { R: parsed })
-      end
-
-      private
-
-      def to_num(token_with_num)
-        token_with_num =~ /\d+/
-        $&.to_i
-      end
     end
   end
 end
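
Usage sketch (not part of the diff): the rewrite drops the token-string helpers (to_num, squash_to_cont, ch_token, ch_value, fix_r_txt!, ...) in favour of index/range based accessors and mutators. The stand-in class below mirrors the new public methods shown above so their behaviour can be tried outside the gem. The class name LexedList, its constructor, and the sample tokens :LPRN/:RPRN are invented for illustration; only :CONT and :R appear in the diff, and the real Base populates @lexeds in its own constructor, which lies outside this hunk.

# Illustrative stand-in for the new Qlang::Lexer::Base accessors.
# Method bodies follow the added (+) lines above; the scaffolding is assumed.
class LexedList
  attr_accessor :lexeds

  def initialize(lexeds)
    @lexeds = lexeds # array of single-key hashes, e.g. { CONT: '1 2' }
  end

  # GET (without side effect)
  def get_value(num)
    num = num.to_i
    @lexeds.map { |lexed| lexed.values.first }[num]
  end

  def token_str
    @lexeds.map.with_index { |lexed, i| ":#{lexed.keys.first}#{i}" }.join
  end

  # POST (with side effect)
  def parsed!(parsed, target)
    case target
    when Range
      parsed_between!((target.first.to_i)..(target.last.to_i), parsed)
    else
      parsed_at!(target.to_i, parsed)
    end
  end

  def squash!(range, token: :CONT)
    range = (range.first.to_i)..(range.last.to_i)
    value = values[range].join
    range.count.times { @lexeds.delete_at(range.first) }
    @lexeds.insert(range.first, { token => value })
  end

  # Legacy accessor
  def values
    @lexeds.map { |lexed| lexed.values.first }
  end

  private

  def parsed_at!(token_position, parsed)
    @lexeds.delete_at(token_position)
    @lexeds.insert(token_position, { R: parsed })
  end

  def parsed_between!(token_range, parsed)
    start_pos = token_range.first
    token_range.count.times { @lexeds.delete_at(start_pos) }
    @lexeds.insert(start_pos, { R: parsed })
  end
end

list = LexedList.new([{ LPRN: '(' }, { CONT: '1 2' }, { RPRN: ')' }])
list.token_str        # => ":LPRN0:CONT1:RPRN2"
list.get_value(1)     # => "1 2"
list.squash!(0..2)    # @lexeds is now [{ CONT: "(1 2)" }]
list.parsed!('[1, 2]', 0)
list.lexeds           # => [{ R: "[1, 2]" }]

As the "GET(without side effect)" / "POST(with side effect)" comments indicate, get_value, token_str, and values are pure reads, while parsed! and squash! mutate @lexeds in place. The to_i calls on num, range bounds, and target presumably let the parser pass positions captured as strings from token_str (e.g. via regexp captures) as well as plain integers; that reading is an inference, not stated in the diff.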