module Eco::Data::Locations
  # Attributes a plain (flat-list) location node carries.
  # `row_num` is bookkeeping only (source CSV row) and is excluded from
  # serialization (see NodePlain#node_hash and PROP_ATTRS).
  NODE_PLAIN_ATTRS = %i[
    row_num id name parent_id weight
    archived archive_token
    classifications classification_names
  ].freeze

  NodePlainStruct = Struct.new(*NODE_PLAIN_ATTRS)

  # Class to treat input csv in a form of a list of nodes, where parent is specified.
  class NodePlain < NodePlainStruct
    include Eco::Data::Locations::NodeBase

    require_relative 'node_plain/parsing'
    require_relative 'node_plain/serial'
    require_relative 'node_plain/builder'

    extend Eco::Data::Locations::NodePlain::Builder

    ALL_ATTRS        = NODE_PLAIN_ATTRS
    ADDITIONAL_ATTRS = %i[row_num].freeze
    # Attributes that are actual node properties (everything but bookkeeping).
    PROP_ATTRS       = (ALL_ATTRS - ADDITIONAL_ATTRS).freeze

    # @return [String, nil] the node id, cleaned via NodeBase#clean_id
    #   (row number included in the warning reference).
    def id
      clean_id(super, ref: "(Row: #{row_num}) ")
    end

    # backwards compatibility
    alias_method :tag, :id

    # @return [String, nil] the node name, falling back to the id when absent.
    def name
      super || id
    end

    # @return [String, nil] the parent id, cleaned silently (no notification).
    def parent_id
      clean_id(super, notify: false, ref: "(Row: #{row_num} - parent_id) ")
    end

    alias_method :parentId, :parent_id

    # Interprets the raw archived cell as a boolean.
    # Accepts literal booleans, or (case-insensitively, ignoring surrounding
    # whitespace) the strings "yes", "x" or "true". Anything else is false.
    # NOTE: normalizes via `to_s` so non-String cell values (numbers, symbols)
    # no longer raise NoMethodError on `downcase`.
    # @return [Boolean]
    def archived
      value = super
      return false if value.nil? || value == false
      return true  if value == true

      str = value.to_s.strip
      return false if str.empty?

      %w[yes x true].include?(str.downcase)
    end

    # @return [Array] the classifications, each normalized via
    #   #treat_classification.
    def classifications
      into_a(super).map do |value|
        treat_classification(value)
      end
    end

    # @return [Array] the raw classification names as a list.
    def classification_names
      into_a(super)
    end

    # @yield [node, json] optional custom serializer
    # @yieldparam node [Node] self
    # @yieldparam json [Hash] the default serialization
    # @yieldreturn [Hash] the serialized Node
    # @param stringify_keys [Boolean] whether hash keys should be Strings.
    # @return [Hash] this node as a Hash, excluding bookkeeping (`row_num`).
    def node_hash(stringify_keys: true)
      json = to_h.reject { |key, _v| key == :row_num }
      json.transform_keys!(&:to_s) if stringify_keys
      json.merge!(yield(self, json)) if block_given?
      json
    end

    private

    # Normalizes a classification String: strips, removes non-word chars
    # and downcases. Non-String values pass through untouched.
    def treat_classification(value)
      return value unless value.is_a?(String)

      value.strip.gsub(/\W+/, '').downcase
    end
    # Kept for backwards compatibility with the original (misspelled) name.
    alias_method :treat_classication, :treat_classification

    # Helper to convert to array: pipe-separated String becomes its segments;
    # anything else is wrapped, flattened and nil-compacted.
    def into_a(value)
      if value.is_a?(String)
        value.split('|')
      else
        [value].flatten
      end.compact
    end
  end
end