Sha256: ea314447e2fa10312897101169ef28f39196bffdb1adcdd00115635e6344c80b

Contents?: true

Size: 1.41 KB

Versions: 4

Compression:

Stored size: 1.41 KB

Contents

# frozen_string_literal: true

module SqlToCsvStream
  # Streams the result of a SQL query as a JSON array, one database row per
  # yielded chunk, by piggybacking on PostgreSQL's COPY command.
  class JsonEnumerator
    # COPY options handed to the underlying enumerator; TEXT format keeps
    # each row on its own line so we can stream it piece by piece.
    JSON_COPY_OPTIONS = { format: 'TEXT' }.freeze

    # object     - anything responding to #to_sql (e.g. an ActiveRecord
    #              relation) or #to_s (a raw SQL string); a trailing
    #              semicolon is stripped so it can be used as a subquery.
    # connection - PostgreSQL connection; defaults to the one provided by
    #              PostgresqlCopyEnumerator.
    def initialize(object, connection: PostgresqlCopyEnumerator.default_connection)
      raw_sql =
        if object.respond_to?(:to_sql)
          object.to_sql
        else
          object.to_s
        end
      raw_sql = raw_sql.chomp(';')

      # This trick comes from:
      # https://dba.stackexchange.com/questions/90482/export-postgres-table-as-json?newreg=0b667caa47c34084bee6c90feec5e4be
      #
      # We stream via PostgreSQL's COPY command in TEXT format, so every row
      # of the query result must first be turned into JSON — ROW_TO_JSON does
      # exactly that. One edge case remains: backslashes inside the JSON text
      # would collide with COPY's own escaping, so we insert additional
      # escape-backslashes by hand via REGEXP_REPLACE.
      wrapped_sql = "SELECT REGEXP_REPLACE(ROW_TO_JSON(t)::TEXT, '\\\\', '\\', 'g') FROM (#{raw_sql}) AS t"
      @copy_enum = PostgresqlCopyEnumerator.new(wrapped_sql, connection: connection, copy_options: JSON_COPY_OPTIONS)
    end

    # Yields the opening bracket, then each row (comma-separated, with the
    # trailing newline chomped), then the closing bracket — so the
    # concatenation of all yielded chunks forms one valid JSON array.
    def each(&stream)
      stream.yield('[')

      first_row = true
      @copy_enum.each do |row|
        # Separate rows with commas, but never before the very first one,
        # otherwise the JSON syntax would break.
        chunk = first_row ? row : ',' + row
        first_row = false
        stream.yield(chunk.chomp)
      end

      stream.yield("]\n")
    end
  end
end

Version data entries

4 entries across 4 versions & 1 rubygem

Version Path
sql_to_csv_stream-0.3.0 lib/sql_to_csv_stream/json_enumerator.rb
sql_to_csv_stream-0.2.2 lib/sql_to_csv_stream/json_enumerator.rb
sql_to_csv_stream-0.2.1 lib/sql_to_csv_stream/json_enumerator.rb
sql_to_csv_stream-0.2.0 lib/sql_to_csv_stream/json_enumerator.rb