lib/logicality/lexer/regexp_lexer.rb in logicality-1.0.4 vs lib/logicality/lexer/regexp_lexer.rb in logicality-1.0.5
- old
+ new
@@ -1,46 +1,49 @@
+# frozen_string_literal: true
+
#
# Copyright (c) 2018-present, Blue Marble Payroll, LLC
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#
module Logicality
module Lexer
+ # This class is a simple lexical token analyzer based on regular expression grammar matchers.
class RegexpLexer
include Grammar
class << self
-
# Token pattern extended with a whitespace-only alternative group.
# Anything that matches neither a grammar token nor whitespace is
# thereby flagged as invalid input.
def invalid_pattern
  [pattern, '(\s*)'].join('|')
end
# Compiled Regexp form of #invalid_pattern, used to detect
# unrecognized characters in an expression.
def invalid_regexp
  /#{invalid_pattern}/
end
# Alternation ('|'-joined) of the source of every Regexp constant
# declared on the Grammar module.
def pattern
  sources = Grammar.constants.map { |name| Grammar.const_get(name).source }

  sources.join('|')
end
# Compiled Regexp form of #pattern matching any grammar token.
def regexp
  /#{pattern}/
end
-
end
attr_reader :expression
# Builds a lexer for the given expression.
#
# @param expression [#to_s] the logical expression to tokenize
# @raise [ArgumentError] when expression is nil/false or stringifies to
#   an empty string, or when it contains unrecognized syntax
def initialize(expression)
  # Guard clause: `expression &&` deliberately rejects nil and false
  # before stringification; empty? is the idiomatic zero-length check.
  raise ArgumentError, 'Expression is required' unless expression && !expression.to_s.empty?

  @expression = expression.to_s

  # Fail fast: surface every unrecognized fragment before iteration starts.
  raise ArgumentError, "Invalid syntax: #{invalid_matches}" unless invalid_matches.empty?

  reset
end
@@ -57,16 +60,14 @@
tokens = scan_array.map.with_index do |value, index|
const = Grammar.constants[index]
value ? Token.new(const, value) : nil
end.compact
- if tokens.length > 1
- raise ArgumentError, "Too many tokens found for: #{scan_array}"
- elsif tokens.length == 0
- raise ArgumentError, "Cannot tokenize: #{scan_array}"
- end
+ raise ArgumentError, "Too many tokens found for: #{scan_array}" if tokens.length > 1
+ raise ArgumentError, "Cannot tokenize: #{scan_array}" if tokens.length.zero?
+
tokens.first
end
def reset
@index = -1
@@ -89,9 +90,8 @@
end
# Memoized list of grammar-token matches scanned from the expression.
# The scan runs once; subsequent calls return the cached result.
def matches
  return @matches if @matches

  @matches = expression.scan(self.class.regexp)
end
-
end
end
end