lib/dentaku/tokenizer.rb in dentaku-2.0.1 vs lib/dentaku/tokenizer.rb in dentaku-2.0.2

Lines prefixed with `-` are from the old version (2.0.1); lines prefixed with `+` are from the new version (2.0.2).

@@ -2,10 +2,10 @@
 require 'dentaku/token_matcher'
 require 'dentaku/token_scanner'

 module Dentaku
   class Tokenizer
-    LPAREN = TokenMatcher.new(:grouping, [:open, :fopen])
+    LPAREN = TokenMatcher.new(:grouping, :open)
     RPAREN = TokenMatcher.new(:grouping, :close)

     def tokenize(string)
       @nesting = 0
       @tokens = []