@using Berp;
@helper CallProduction(ProductionRule production)
{
  switch(production.Type)
  {
    case ProductionRuleType.Start:
      @:        start_rule(context, :@production.RuleName);
      break;
    case ProductionRuleType.End:
      @:        end_rule(context, :@production.RuleName);
      break;
    case ProductionRuleType.Process:
      @:        build(context, token);
      break;
  }
}
@helper HandleParserError(IEnumerable<string> expectedTokens, State state)
{<text>      state_comment = "State: @state.Id - @Raw(state.Comment)"
      token.detach
      expected_tokens = ["@Raw(string.Join("\", \"", expectedTokens))"]
      error = token.eof? ? UnexpectedEOFException.new(token, expected_tokens, state_comment) : UnexpectedTokenException.new(token, expected_tokens, state_comment)
      raise error if (stop_at_first_error)
      add_error(context, error)
      return @state.Id</text>}
@helper MatchToken(TokenType tokenType)
{match_@(tokenType)(context, token)}
# This file is generated. Do not edit! Edit gherkin-ruby.razor instead.
require 'gherkin/ast_builder'
require 'gherkin/token_matcher'
require 'gherkin/token_scanner'
require 'gherkin/errors'

module Gherkin

  RULE_TYPE = [
    :None,
@foreach(var rule in Model.RuleSet.Where(r => !r.TempRule))
{<text>    :@rule.Name.Replace("#", "_"), # @rule.ToString(true)
</text>}  ]

  class ParserContext
    attr_reader :token_scanner, :token_matcher, :token_queue, :errors

    def initialize(token_scanner, token_matcher, token_queue, errors)
      @@token_scanner = token_scanner
      @@token_matcher = token_matcher
      @@token_queue = token_queue
      @@errors = errors
    end
  end

  class @Model.ParserClassName
    attr_accessor :stop_at_first_error

    def initialize(ast_builder=AstBuilder.new)
      @@ast_builder = ast_builder
    end

    def parse(token_scanner, token_matcher=TokenMatcher.new)
      token_scanner = token_scanner.is_a?(TokenScanner) ? token_scanner : TokenScanner.new(token_scanner)

      @@ast_builder.reset
      token_matcher.reset
      context = ParserContext.new(
        token_scanner,
        token_matcher,
        [],
        []
      )

      start_rule(context, :@Model.RuleSet.StartRule.Name);
      state = 0
      token = nil
      begin
        token = read_token(context)
        state = match_token(state, token, context)
      end until(token.eof?)

      end_rule(context, :@Model.RuleSet.StartRule.Name)

      raise CompositeParserException.new(context.errors) if context.errors.any?

      get_result()
    end

    def build(context, token)
      handle_ast_error(context) do
        @@ast_builder.build(token)
      end
    end

    def add_error(context, error)
      context.errors.push(error)
      raise CompositeParserException, context.errors if context.errors.length > 10
    end

    def start_rule(context, rule_type)
      handle_ast_error(context) do
        @@ast_builder.start_rule(rule_type)
      end
    end

    def end_rule(context, rule_type)
      handle_ast_error(context) do
        @@ast_builder.end_rule(rule_type)
      end
    end

    def get_result()
      @@ast_builder.get_result
    end

    def read_token(context)
      context.token_queue.any? ? context.token_queue.shift : context.token_scanner.read
    end

@foreach(var rule in Model.RuleSet.TokenRules)
{<text>
    def match_@(rule.Name.Replace("#", ""))( context, token)
@if (rule.Name != "#EOF")
{
      @:return false if token.eof?
}      return handle_external_error(context, false) do
        context.token_matcher.match_@(rule.Name.Replace("#", ""))(token)
      end
    end
</text>}
    def match_token(state, token, context)
      case state
@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
{
      @:when @state.Id
      @:  match_token_at_@(state.Id)(token, context)
}      else
        raise InvalidOperationException, "Unknown state: #{state}"
      end
    end

@foreach(var state in Model.States.Values.Where(s => !s.IsEndState))
{<text>
    # @Raw(state.Comment)
    def match_token_at_@(state.Id)(token, context)
@foreach(var transition in state.Transitions)
{
      @:      if @MatchToken(transition.TokenType)
      if (transition.LookAheadHint != null)
      {
      @:        if lookahead_@(transition.LookAheadHint.Id)(context, token)
      }
      foreach(var production in transition.Productions)
      {
        @CallProduction(production)
      }
      @:        return @transition.TargetState
      if (transition.LookAheadHint != null)
      {
      @:        end
      }
      @:      end
}
      @HandleParserError(state.Transitions.Select(t => "#" + t.TokenType.ToString()).Distinct(), state)
    end
</text>}
@foreach(var lookAheadHint in Model.RuleSet.LookAheadHints)
{<text>
    def lookahead_@(lookAheadHint.Id)(context, currentToken)
      currentToken.detach
      token = nil
      queue = []
      match = false
      loop do
        token = read_token(context)
        token.detach
        queue.push(token)

        if (false @foreach(var tokenType in lookAheadHint.ExpectedTokens) {<text>|| @MatchToken(tokenType)</text>})
          match = true
          break
        end

        break unless (false @foreach(var tokenType in lookAheadHint.Skip) {<text>|| @MatchToken(tokenType)</text>})
      end

      context.token_queue.concat(queue)

      return match
    end
</text>}
    private

    def handle_ast_error(context, &action)
      handle_external_error(context, true, &action)
    end

    def handle_external_error(context, default_value, &action)
      return action.call if stop_at_first_error

      begin
        return action.call
      rescue CompositeParserException => e
        e.errors.each { |error| add_error(context, error) }
      rescue ParserException => e
        add_error(context, e)
      end
      default_value
    end

  end
end