lib/intelli_agent/openai.rb in intelli_agent-0.2.3 vs lib/intelli_agent/openai.rb in intelli_agent-0.2.4
- removed (present only in 0.2.3)
+ added (new in 0.2.4)
@@ -2,30 +2,18 @@
BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
MAX_TOKENS = ENV.fetch('OPENAI_MAX_TOKENS', 16_383).to_i
module ResponseExtender
- def content
- dig('choices', 0, 'message', 'content')
- end
+ def message = dig('choices', 0, 'message')
- def message
- dig('choices', 0, 'message')
- end
+ def content = dig('choices', 0, 'message', 'content')
+ def content? = !content.nil?
- def content?
- !content.nil?
- end
+ def tool_calls = dig('choices', 0, 'message', 'tool_calls')
+ def tool_calls? = !tool_calls.nil?
- def tool_calls
- dig('choices', 0, 'message', 'tool_calls')
- end
-
- def tool_calls?
- !tool_calls.nil?
- end
-
def functions
return if tool_calls.nil?
functions = tool_calls.filter { |tool| tool['type'].eql? 'function' }
return if functions.empty?
@@ -37,16 +25,14 @@
end
functions_list
end
- def functions?
- !functions.nil?
- end
+ def functions? = !functions.nil?
end
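The refactor above rewrites every ResponseExtender helper as a Ruby 3 endless method definition; behaviour is unchanged. A minimal usage sketch, assuming the gem extends the raw chat-completion hash with this module under the IntelliAgent::OpenAI namespace (the extend call itself sits outside this hunk):

  # Sketch only: the response shape mirrors the OpenAI chat completions API.
  response = { 'choices' => [{ 'message' => { 'content' => 'Hello!' } }] }
  response.extend(IntelliAgent::OpenAI::ResponseExtender)

  response.content      # => "Hello!"
  response.content?     # => true
  response.tool_calls?  # => false, the message carries no 'tool_calls' key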
- def self.embed(input, model: 'text-embedding-3-large')
+ def self.embeddings(input, model: 'text-embedding-3-large')
response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
def response.embedding = dig('data', 0, 'embedding')
response
end
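The only change here is the public name: embed becomes embeddings; the singleton embedding reader defined on the response is kept. A hedged usage sketch, assuming a ruby-openai OpenAI::Client is configured with an API key elsewhere in the application:

  # Sketch only: client credentials are configured outside this diff.
  vector = IntelliAgent::OpenAI.embeddings('Ruby is expressive').embedding
  vector  # => Array of floats (3072 dimensions for text-embedding-3-large)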
@@ -125,6 +111,6 @@
role, content = msg.first
{ role: role.to_s, content: content }
end
end
end
-end
\ No newline at end of file
+end
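The final hunk only restores the missing trailing newline; its context lines show how single-key message hashes are normalised into role/content pairs. An illustrative run of that mapping, with the enclosing method assumed since its signature falls outside this hunk:

  # Illustrative only: mirrors the map block shown in the context lines above.
  [{ system: 'You are a helpful assistant.' }, { user: 'Hi!' }].map do |msg|
    role, content = msg.first
    { role: role.to_s, content: content }
  end
  # => [{ role: "system", content: "You are a helpful assistant." },
  #     { role: "user", content: "Hi!" }]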