# Copyright (c) 2022 Contrast Security, Inc. See https://www.contrastsecurity.com/enduser-terms-0317a for more details.
# frozen_string_literal: true

require 'contrast/utils/object_share'
require 'contrast/agent/protect/rule/unsafe_file_upload'
require 'contrast/agent/protect/rule/unsafe_file_upload/unsafe_file_upload_matcher'
require 'contrast/agent/protect/input_analyzer/input_analyzer'
require 'contrast/utils/input_classification'

module Contrast
  module Agent
    module Protect
      module Rule
        # This module will do the Input Classification stage of Unsafe File Upload
        # rule. As a result input would be marked as DEFINITEATTACK or IGNORE.
        #
        # NOTE(review): the entire implementation below is commented out, so this
        # module is empty at runtime, yet the `require` lines at the top of the
        # file (rule, matcher, input analyzer) are still active. Confirm whether
        # this rule was intentionally disabled — in which case the now-unused
        # requires should be pruned — or whether the implementation is meant to
        # be restored.
        module UnsafeFileUploadInputClassification
          # UNSAFE_UPLOAD_MATCH = 'unsafe-file-upload-input-tracing-v1'
          #
          # class << self
          #   include InputClassificationBase
          #   include Contrast::Agent::Protect::Rule::UnsafeFileUploadMatcher
          #
          #   # Input Classification stage is done to determine if a user input is
          #   # DEFINITEATTACK or to be ignored.
          #   #
          #   # @param input_type [Contrast::Agent::Reporting::InputType] The type of the user input.
          #   # @param value [String, Array<String>] the value of the input.
          #   # @param input_analysis [Contrast::Agent::Reporting::InputAnalysis] Holds all the results from the
          #   #                                                       agent analysis from the current
          #   #                                                       Request.
          #   # @return ia [Contrast::Agent::Reporting::InputAnalysis] with updated results.
          #   def classify input_type, value, input_analysis
          #     unless Contrast::Agent::Protect::Rule::UnsafeFileUpload::APPLICABLE_USER_INPUTS.include?(input_type)
          #       return
          #     end
          #     return unless input_analysis.request
          #
          #     rule_id = Contrast::Agent::Protect::Rule::UnsafeFileUpload::NAME
          #     results = []
          #
          #     Array(value).each do |val|
          #       Array(val).each do |v|
          #         results << create_new_input_result(input_analysis.request, rule_id, input_type, v)
          #       end
          #     end
          #
          #     input_analysis.results = results
          #     input_analysis
          #   end
          #
          #   private
          #
          #   # This method checks if input is tagged DEFINITEATTACK or IGNORE, matches value with its
          #   # key if needed and creates a new instance of InputAnalysisResult.
          #   #
          #   # @param request [Contrast::Agent::Request] the current request context.
          #   # @param rule_id [String] The name of the Protect Rule.
          #   # @param input_type [Contrast::Agent::Reporting::InputType] The type of the user input.
          #   # @param value [String, Array<String>] the value of the input.
          #   #
          #   # @return res [Contrast::Agent::Reporting::InputAnalysisResult]
          #   def create_new_input_result request, rule_id, input_type, value
          #     ia_result = new_ia_result rule_id, input_type, request.path, value
          #     if unsafe_match? value
          #       ia_result.score_level = DEFINITEATTACK
          #       ia_result.ids << UNSAFE_UPLOAD_MATCH
          #     else
          #       ia_result.score_level = IGNORE
          #     end
          #     ia_result.key = if input_type == MULTIPART_FIELD_NAME
          #                       Contrast::Agent::Protect::InputAnalyzer::DISPOSITION_FILENAME
          #                     else
          #                       Contrast::Agent::Protect::InputAnalyzer::DISPOSITION_NAME
          #                     end
          #     ia_result
          #   end
          # end
        end
      end
    end
  end
end