Diff of lib/TextParser.rb between taskjuggler 0.0.3 (old version) and taskjuggler 0.0.4 (new version).

- marks a line removed (present only in taskjuggler 0.0.3)
+ marks a line added (present only in taskjuggler 0.0.4)

@@ -1,11 +1,11 @@ #!/usr/bin/env ruby -w # encoding: UTF-8 # # = TextParser.rb -- The TaskJuggler III Project Management Software # -# Copyright (c) 2006, 2007, 2008, 2009 by Chris Schlaeger <cs@kde.org> +# Copyright (c) 2006, 2007, 2008, 2009, 2010 by Chris Schlaeger <cs@kde.org> # # This program is free software; you can redistribute it and/or modify # it under the terms of version 2 of the GNU General Public License as # published by the Free Software Foundation. # @@ -69,16 +69,25 @@ # Create a new TextParser object. def initialize @rules = { } # Array to hold the token types that the scanner can return. @variables = [] + # An list of token types that are not allowed in the current context. + @badVariables = [] # The currently processed rule. @cr = nil - # If set to a value larger than 0 debug output will be generated. - @@debug = 30 end + # Limit the allowed tokens of the scanner to the subset passed by the + # _tokenSet_ Array. + def limitTokenSet(tokenSet) + return unless tokenSet + + @badVariables = @variables.dup + @badVariables.delete_if { |v| tokenSet.include?(v) } + end + # Call all methods that start with 'rule_' to initialize the rules. def initRules methods.each do |m| if m[0, 5] == 'rule_' # Create a new rule with the suffix of the function name as name. @@ -195,11 +204,11 @@ # list of the rule. For each rule pattern we store the transitions for this # pattern in a token -> rule hash. def getTransitions(rule) # If we have processed this rule before we can just return a copy # of the transitions of this rule. This avoids endless recursions. - return rule.transitions.clone unless rule.transitions.empty? + return rule.transitions.dup unless rule.transitions.empty? rule.transitions = [] rule.patterns.each do |pat| allTokensOptional = true transitions = { } @@ -239,11 +248,11 @@ end end end rule.transitions << transitions end - rule.transitions.clone + rule.transitions.dup end def checkRule(rule) if rule.patterns.empty? 
raise "Rule #{rule.name} must have at least one pattern" @@ -253,12 +262,12 @@ pat.each do |tok| type = tok[0] token = tok[1..-1] if type == ?$ if @variables.index(token).nil? - raise "Fatal Error: Illegal variable type #{token} used for " + - "rule #{rule.name} in pattern '#{pat}'" + error('unsupported_token', + "The token #{token} is not supported here.") end elsif type == ?! if @rules[token].nil? raise "Fatal Error: Reference to unknown rule #{token} in " + "pattern '#{pat}' of rule #{rule.name}" @@ -278,16 +287,11 @@ # the first iteration has been completed. repeatMode = false loop do # At the beginning of a rule we need a token from the input to determine # which pattern of the rule needs to be processed. - begin - token = nextToken - Log << "Token: [#{token[0]}][#{token[1]}]" - rescue TjException - error('parse_rule1', $!.message) - end + token = getNextToken # The scanner cannot differentiate between keywords and identifiers. So # whenever an identifier is returned we have to see if we have a # matching keyword first. If none is found, then look for normal # identifiers. @@ -345,21 +349,17 @@ # still have one and continue with the referenced rule. unless token.nil? returnToken(token) token = nil end - @stack.last.store(parseRule(@rules[elToken])) + @stack.last.store(parseRule(@rules[elToken]), + @scanner.sourceFileInfo) else # In case the element is a keyword or variable we have to get a new # token if we don't have one anymore. if token.nil? - begin - token = nextToken - Log << "Token: [#{token[0]}][#{token[1]}]" - rescue TjException - error('parse_rule2', $!.message) - end + token = getNextToken end if elType == ?_ # If the element requires a keyword the token must match this # keyword. @@ -369,11 +369,11 @@ unless @@expectedTokens.empty? text = "#{@@expectedTokens.join(', ')} or " + text end error('spec_keywork_expctd', text) end - @stack.last.store(elToken) + @stack.last.store(elToken, @scanner.sourceFileInfo) elsif elType == ?. 
if token != [ '.', '<END>' ] error('end_expected', 'End expected but found ' + "'#{token[1]}' (#{token[0]}).") end @@ -386,11 +386,11 @@ text = "#{@@expectedTokens.join(', ')} or " + text end error('spec_token_expctd', text) end # If the element is a variable store the value of the token. - @stack.last.store(token[1]) + @stack.last.store(token[1], @scanner.sourceFileInfo) end # The token has been consumed. Reset the variable. token = nil @@expectedTokens = [] end @@ -417,9 +417,23 @@ repeatMode = true end Log.exit('parseRule', "Finished rule #{rule.name}") return result + end + + def getNextToken + begin + token = nextToken + Log << "Token: [#{token[0]}][#{token[1]}]" + rescue TjException + error('parse_rule', $!.message) + end + if @badVariables.include?(token[0]) + error('unsupported_token', + "The token #{token[1]} is not supported in this context.") + end + token end end end