Diff of vendor/handlebars/spec/tokenizer_spec.rb between handlebars-0.3.2 (old) and handlebars-0.4.0 (new)

- lines removed (present only in handlebars-0.3.2, the old version)
+ lines added (present only in handlebars-0.4.0, the new version)

@@ -49,10 +49,19 @@ result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE)) result[4].should be_token("CONTENT", "{{bar}} ") end + it "supports escaping multiple delimiters" do + result = tokenize("{{foo}} \\{{bar}} \\{{baz}}") + result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT CONTENT)) + + result[3].should be_token("CONTENT", " ") + result[4].should be_token("CONTENT", "{{bar}} ") + result[5].should be_token("CONTENT", "{{baz}}") + end + it "supports escaping a triple stash" do result = tokenize("{{foo}} \\{{{bar}}} {{baz}}") result.should match_tokens(%w(OPEN ID CLOSE CONTENT CONTENT OPEN ID CLOSE)) result[4].should be_token("CONTENT", "{{{bar}}} ") @@ -121,36 +130,48 @@ result.should match_tokens(%w(CONTENT OPEN ID CLOSE CONTENT)) result[0].should be_token("CONTENT", "foo ") result[4].should be_token("CONTENT", " baz") end - it "tokenizes a partial as 'OPEN_PARTIAL ID CLOSE'" do + it "tokenizes a partial as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do result = tokenize("{{> foo}}") - result.should match_tokens(%w(OPEN_PARTIAL ID CLOSE)) + result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE)) end - it "tokenizes a partial with context as 'OPEN_PARTIAL ID ID CLOSE'" do + it "tokenizes a partial with context as 'OPEN_PARTIAL PARTIAL_NAME ID CLOSE'" do result = tokenize("{{> foo bar }}") - result.should match_tokens(%w(OPEN_PARTIAL ID ID CLOSE)) + result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME ID CLOSE)) end - it "tokenizes a partial without spaces as 'OPEN_PARTIAL ID CLOSE'" do + it "tokenizes a partial without spaces as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do result = tokenize("{{>foo}}") - result.should match_tokens(%w(OPEN_PARTIAL ID CLOSE)) + result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE)) end - it "tokenizes a partial space at the end as 'OPEN_PARTIAL ID CLOSE'" do + it "tokenizes a partial space at the end as 'OPEN_PARTIAL PARTIAL_NAME CLOSE'" do result = tokenize("{{>foo }}") - result.should 
match_tokens(%w(OPEN_PARTIAL ID CLOSE)) + result.should match_tokens(%w(OPEN_PARTIAL PARTIAL_NAME CLOSE)) end it "tokenizes a comment as 'COMMENT'" do result = tokenize("foo {{! this is a comment }} bar {{ baz }}") result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE)) result[1].should be_token("COMMENT", " this is a comment ") end + it "tokenizes a block comment as 'COMMENT'" do + result = tokenize("foo {{!-- this is a {{comment}} --}} bar {{ baz }}") + result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE)) + result[1].should be_token("COMMENT", " this is a {{comment}} ") + end + + it "tokenizes a block comment with whitespace as 'COMMENT'" do + result = tokenize("foo {{!-- this is a\n{{comment}}\n--}} bar {{ baz }}") + result.should match_tokens(%w(CONTENT COMMENT CONTENT OPEN ID CLOSE)) + result[1].should be_token("COMMENT", " this is a\n{{comment}}\n") + end + it "tokenizes open and closing blocks as 'OPEN_BLOCK ID CLOSE ... OPEN_ENDBLOCK ID CLOSE'" do result = tokenize("{{#foo}}content{{/foo}}") result.should match_tokens(%w(OPEN_BLOCK ID CLOSE CONTENT OPEN_ENDBLOCK ID CLOSE)) end @@ -184,10 +205,16 @@ result = tokenize("{{ foo bar \"baz\" }}") result.should match_tokens(%w(OPEN ID ID STRING CLOSE)) result[3].should be_token("STRING", "baz") end + it "tokenizes mustaches with String params using single quotes as 'OPEN ID ID STRING CLOSE'" do + result = tokenize("{{ foo bar \'baz\' }}") + result.should match_tokens(%w(OPEN ID ID STRING CLOSE)) + result[3].should be_token("STRING", "baz") + end + it "tokenizes String params with spaces inside as 'STRING'" do result = tokenize("{{ foo bar \"baz bat\" }}") result.should match_tokens(%w(OPEN ID ID STRING CLOSE)) result[3].should be_token("STRING", "baz bat") end @@ -196,10 +223,16 @@ result = tokenize(%|{{ foo "bar\\"baz" }}|) result.should match_tokens(%w(OPEN ID STRING CLOSE)) result[2].should be_token("STRING", %{bar"baz}) end + it "tokenizes String params using single quotes with 
escapes quotes as 'STRING'" do + result = tokenize(%|{{ foo 'bar\\'baz' }}|) + result.should match_tokens(%w(OPEN ID STRING CLOSE)) + result[2].should be_token("STRING", %{bar'baz}) + end + it "tokenizes numbers" do result = tokenize(%|{{ foo 1 }}|) result.should match_tokens(%w(OPEN ID INTEGER CLOSE)) result[2].should be_token("INTEGER", "1") end @@ -240,9 +273,23 @@ result.should match_tokens %w(OPEN ID ID ID EQUALS STRING ID EQUALS ID CLOSE) result = tokenize("{{foo omg bar=baz bat=\"bam\"}}") result.should match_tokens %w(OPEN ID ID ID EQUALS ID ID EQUALS STRING CLOSE) result[2].should be_token("ID", "omg") + end + + it "tokenizes special @ identifiers" do + result = tokenize("{{ @foo }}") + result.should match_tokens %w( OPEN DATA CLOSE ) + result[1].should be_token("DATA", "foo") + + result = tokenize("{{ foo @bar }}") + result.should match_tokens %w( OPEN ID DATA CLOSE ) + result[2].should be_token("DATA", "bar") + + result = tokenize("{{ foo bar=@baz }}") + result.should match_tokens %w( OPEN ID ID EQUALS DATA CLOSE ) + result[4].should be_token("DATA", "baz") end it "does not time out in a mustache with a single } followed by EOF" do Timeout.timeout(1) { tokenize("{{foo}").should match_tokens(%w(OPEN ID)) } end