Sha256: c182340c2d25d01c248be814c3449fc31ce0baa5ce3b4593b8d0af1f84e57317

Contents?: true

Size: 1.1 KB

Versions: 7

Compression:

Stored size: 1.1 KB

Contents

# frozen_string_literal: true
require "spec_helper"

describe GraphQL::Language::Lexer do
  subject { GraphQL::Language::Lexer }

  describe ".tokenize" do
    let(:query_string) {%|
      {
        query getCheese {
          cheese(id: 1) {
            ... cheeseFields
          }
        }
      }
    |}
    let(:tokens) { subject.tokenize(query_string) }

    it "keeps track of previous_token" do
      assert_equal tokens[0], tokens[1].prev_token
    end

    it "unescapes escaped characters" do
      assert_equal "\" \\ / \b \f \n \r \t", subject.tokenize('"\\" \\\\ \\/ \\b \\f \\n \\r \\t"').first.to_s
    end

    it "unescapes escaped unicode characters" do
      assert_equal "\t", subject.tokenize('"\\u0009"').first.to_s
    end

    it "rejects bad unicode, even when there's good unicode in the string" do
      assert_equal :BAD_UNICODE_ESCAPE, subject.tokenize('"\\u0XXF \\u0009"').first.name
    end

    it "clears the previous_token between runs" do
      tok_1 = subject.tokenize(query_string)
      tok_2 = subject.tokenize(query_string)
      assert_equal nil, tok_2[0].prev_token
    end
  end
end

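The spec above exercises `GraphQL::Language::Lexer.tokenize` and the token interface it returns (`#name`, `#to_s`, `#prev_token`). A minimal usage sketch, assuming the graphql ~> 1.4 API shown in the spec; the query string here is only an illustrative input:

    require "graphql"

    # tokenize returns an array of token objects; each exposes #name (a symbol
    # such as :LCURLY or :IDENTIFIER), #to_s (the raw/unescaped value), and
    # #prev_token, as exercised by the spec above.
    tokens = GraphQL::Language::Lexer.tokenize('{ cheese(id: 1) { flavor } }')

    tokens.each do |token|
      puts "#{token.name}: #{token.to_s.inspect}"
    end

    # The first token of each run has no predecessor, which is what the
    # "clears the previous_token between runs" example asserts.
    puts tokens.first.prev_token.nil? # => true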
Version data entries

7 entries across 7 versions & 1 rubygem

Version Path
graphql-1.4.5 spec/graphql/language/lexer_spec.rb
graphql-1.4.4 spec/graphql/language/lexer_spec.rb
graphql-1.4.3 spec/graphql/language/lexer_spec.rb
graphql-1.4.2 spec/graphql/language/lexer_spec.rb
graphql-1.4.1 spec/graphql/language/lexer_spec.rb
graphql-1.4.0 spec/graphql/language/lexer_spec.rb
graphql-1.3.0 spec/graphql/language/lexer_spec.rb