lib/rouge/lexers/javascript.rb in rouge-2.0.6 vs lib/rouge/lexers/javascript.rb in rouge-2.0.7

- old
+ new

@@ -158,10 +158,15 @@
           token Punctuation
           push :ternary
           push :expr_start
         end
 
+        rule /(\@)(\w+)?/ do
+          groups Punctuation, Name::Decorator
+          push :expr_start
+        end
+
         rule /[{}]/, Punctuation, :statement
 
         rule id do |m|
           if self.class.keywords.include? m[0]
             token Keyword
@@ -239,55 +244,15 @@
 
       # template strings
       state :template_string do
        rule /\${/, Punctuation, :template_string_expr
        rule /`/, Str::Double, :pop!
-        rule /(\\\\|\\[\$`]|[^\$`]|\$[^{])*/, Str::Double
+        rule /(\\\\|\\[\$`]|[^\$`]|\$(?!{))*/, Str::Double
       end
 
       state :template_string_expr do
         rule /}/, Punctuation, :pop!
         mixin :root
-      end
-    end
-
-    class JSON < RegexLexer
-      desc "JavaScript Object Notation (json.org)"
-      tag 'json'
-      filenames '*.json'
-      mimetypes 'application/json', 'application/vnd.api+json',
-                'application/hal+json'
-
-      # TODO: is this too much of a performance hit? JSON is quite simple,
-      # so I'd think this wouldn't be too bad, but for large documents this
-      # could mean doing two full lexes.
-      def self.analyze_text(text)
-        return 0.8 if text =~ /\A\s*{/m && text.lexes_cleanly?(self)
-      end
-
-      state :root do
-        rule /\s+/m, Text::Whitespace
-        rule /"/, Str::Double, :string
-        rule /(?:true|false|null)\b/, Keyword::Constant
-        rule /[{},:\[\]]/, Punctuation
-        rule /-?(?:0|[1-9]\d*)\.\d+(?:e[+-]\d+)?/i, Num::Float
-        rule /-?(?:0|[1-9]\d*)(?:e[+-]\d+)?/i, Num::Integer
-      end
-
-      state :string do
-        rule /[^\\"]+/, Str::Double
-        rule /\\./, Str::Escape
-        rule /"/, Str::Double, :pop!
-      end
-    end
-
-    class JSONDOC < JSON
-      desc "JavaScript Object Notation with extenstions for documentation"
-      tag 'json-doc'
-
-      prepend :root do
-        rule %r(//.*?$), Comment::Single
-        rule /(\.\.\.)/, Comment::Single
       end
     end
   end
 end