require "pg_graph" require "fixture_fox/version" require "fixture_fox/error.rb" require "fixture_fox/token.rb" require "fixture_fox/line.rb" require "fixture_fox/ast.rb" require "fixture_fox/idr.rb" require "fixture_fox/tokenizer.rb" require "fixture_fox/parser.rb" require "fixture_fox/hash_parser.rb" require "fixture_fox/analyzer.rb" require "fixture_fox/anchor.rb" module FixtureFox # Fox is compiled incrementally so you can add some files, parse them into an # AST, inspect the result through #idr, and then add some more files. The #idr # can also be regenerated by executing #call with a different set of seed table IDs class Fox # The type of the database (PgGraph::Type::Database object) attr_reader :type # Name of first source file attr_reader :file # Names of all source files except included files. FIXME: Not maintained - always [] attr_reader :files # Tokenized lines. Note that this doesn't include lines from included # files attr_reader :lines # The AST object. The AST object is a snapshot and grows as more files are # parsed attr_reader :ast # The Analyzer object. The analyzer object is recalculated after new files # are added attr_reader :analyzer # The IDR object. It is reset every time a new file is added def idr() @idr || generate end # List of tables with records (PgGraph::Type::Table) def tables() @analyzer.data_tables end # Default schema. Defaults to "public" attr_reader :schema # Map from qualified table name to max ID for that table. This is the same # as the number of records in the table if the IDs wasn't seeded with a # start value in #initialize attr_reader :ids # Anchors object. #anchors is modified as more and more files are added attr_reader :anchors # List of anchors defined by the sources def defined_anchors() @analyzer.defined_anchors.values end # List of external anchors referenced by the sources. FIXME: Unused def referenced_anchors() @analyzer.referenced_anchors.values end def parsed?() !@ast.nil? end def assigned?() !@analyzer.nil? && @analyzer.assigned? end def checked?() assigned? && @analyzer.checked? end def analyzed?() assigned? && checked? end def generated?() !@idr.nil? end # TODO: Rephrase in terms of Analyzer#generated? # Returns true if the Fox object is frozen and no more files can be added def frozen?() @frozen end # Freeze the Fox object. After this no more files can be added def freeze!() @frozen = true end def initialize(type, files = [], schema: nil, ids: nil, anchors: nil) constrain type, PgGraph::Type constrain files, [String] constrain ids, { String => Integer }, nil constrain anchors, Anchors, [Hash], NilClass @type = type @files = [] @schema = schema || "public" @ids = ids || {} if anchors.is_a?(Array) @anchors = Anchors.new(type, anchors) else @anchors = anchors || Anchors.new(type) end @lines = [] @ast = nil @analyzer = nil @idr = nil @data = nil @frozen = false if !files.empty? compile(files) assign_types # Analyze as far as possible without requiring anchors & ids freeze! end end # Note: Doesn't dup the Ast or other internal data structures except ids # and anchors def dup generate if !generated? Fox.new(type) end def compile(*file_or_texts) !frozen? 

    def compile(*file_or_texts)
      !frozen? or raise Error, "Frozen Fox object"
      @analyzer = nil
      @idr = nil
      @data = nil
      @ast ||= Ast.new
      Array(file_or_texts).flatten.each { |source|
        source, lines = tokenize(source)
        parse(source, lines)
      }
    end

    def tokenize(source)
      # puts "tokenize(#{source.inspect})"
      tokenizer = Tokenizer.new(source)
      @file ||= tokenizer.file
      @files.append tokenizer.file
      lines = tokenizer.call
      @lines.append(*lines)
      [source, lines]
    end

    def parse(source, lines)
      # puts "parse(#{source.inspect})"
      parser = Parser.new(source, lines, schema: @schema)
      @ast = parser.call(@ast)
      parser.anchor_files.each { |file| @anchors.merge!(Anchors.load(@type, file)) }
    end

    def analyze(anchors: nil)
      # puts "analyze"
      constrain anchors, Anchors, NilClass
      assign_types if !assigned?
      check_types(anchors: anchors) if !checked?
    end

    def assign_types
      # puts "assign_types"
      @analyzer = Analyzer.new(@type, @ast, ids: ids, anchors: anchors)
      @ids = @analyzer.ids
      @anchors = @analyzer.anchors
      @analyzer.assign_types
    end

    def check_types(anchors: nil)
      # puts "check_types"
      constrain anchors, Anchors, NilClass
      assign_types if !assigned?
      @analyzer.check_types(anchors: anchors)
    end

    def generate(anchors: nil, ids: nil)
      # puts "generate"
      constrain anchors, Anchors, NilClass
      analyze(anchors: anchors) if !analyzed?
      @idr = @analyzer.generate(ids: ids)
    end

    # PgGraph::Data::Database object of the Idr object
    def data(anchors: nil, ids: nil)
      constrain anchors, Anchors, NilClass
      generate(anchors: anchors, ids: ids) if !generated?
      @data = PgGraph::Data.new(type, idr.to_h)
    end

    def write_state(file) self.class.write_state(ids, anchors, file) end

    # YAML document for the generated data set
    def to_yaml() data.to_yaml.to_yaml end

    def to_sql(format: :psql, ids: nil, delete: :all)
      if [:touched, :recursive, :all].include?(delete)
        # Ugly quick-fix because pg_graph doesn't do dependencies(!)
        data.to_sql(format: format, ids: ids || {}, delete: :none, truncate: delete, files: files)
      else
        data.to_sql(format: format, ids: ids || {}, delete: delete, files: files)
      end
    end

    def self.read_ids(file) YAML.load(IO.read(file)) end
    def self.write_ids(ids, file) IO.write(file, YAML.dump(ids)) end

    def self.read_anchors(type, file) Anchors.load(type, file) end
    def self.write_anchors(anchors, file) anchors.save(file) end

    def self.read_state(type, file)
      if File.size?(file)
        state = YAML.load(IO.read(file))
        [state[:ids], Anchors.new(type, state[:anchors])]
      else
        [nil, nil]
      end
    end

    def self.write_state(ids, anchors, file)
      IO.write(file, YAML.dump({ ids: ids, anchors: anchors.to_yaml }))
    end
  end
end
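
# A hedged sketch of persisting ids/anchors between runs via the class-level
# state helpers above ("state.yml" and `db_type` are illustrative assumptions;
# read_state returns [nil, nil] when the file is absent or empty, which
# Fox.new accepts):
#
#   ids, anchors = FixtureFox::Fox.read_state(db_type, "state.yml")
#   fox = FixtureFox::Fox.new(db_type, ["fixtures.fox"], ids: ids, anchors: anchors)
#   fox.write_state("state.yml")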