lib/intelli_agent/openai.rb in intelli_agent-0.1.0 vs lib/intelli_agent/openai.rb in intelli_agent-0.1.1
- old
+ new
@@ -1,9 +1,9 @@
# In the future, this may become a bus to more than one AI provider
module IntelliAgent::OpenAI
- BASIC_MODEL = 'gpt-4o-mini' # ENV.fetch('OPENAI_BASIC_MODEL')
- ADVANCED_MODEL = 'gpt-4o' # ENV.fetch('OPENAI_ADVANCED_MODEL')
+ BASIC_MODEL = ENV.fetch('OPENAI_BASIC_MODEL', 'gpt-4o-mini')
+ ADVANCED_MODEL = ENV.fetch('OPENAI_ADVANCED_MODEL', 'gpt-4o-2024-08-06')
def self.embed(input, model: 'text-embedding-3-large')
response = OpenAI::Client.new.embeddings(parameters: { input:, model: })
response.dig('data', 0, 'embedding')
end
@@ -14,11 +14,16 @@
parameters = { model:, messages: [{ role: 'user', content: prompt }] }
parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
response = OpenAI::Client.new.chat(parameters:)
- response.dig('choices', 0, 'message', 'content').strip
+
+ if response_format.nil?
+ response.dig('choices', 0, 'message', 'content').strip
+ else
+ response
+ end
end
def self.vision(prompt:, image_url:, model: :advanced, response_format: nil)
model = select_model(model)
messages = [{ type: :text, text: prompt },
@@ -27,11 +32,15 @@
parameters = { model: model, messages: [{ role: :user, content: messages }] }
parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
response = OpenAI::Client.new.chat(parameters:)
- response.dig('choices', 0, 'message', 'content').strip
+ if response_format.nil?
+ response.dig('choices', 0, 'message', 'content').strip
+ else
+ response
+ end
end
def self.single_chat(system:, user:, model: :basic, response_format: nil)
model = select_model(model)
parameters = { model:,
@@ -41,19 +50,27 @@
] }
parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
response = OpenAI::Client.new.chat(parameters:)
- response.dig('choices', 0, 'message', 'content').strip
+ if response_format.nil?
+ response.dig('choices', 0, 'message', 'content').strip
+ else
+ response
+ end
end
def self.chat(messages:, model: :basic, response_format: nil)
model = select_model(model)
parameters = { model:, messages: }
parameters[:response_format] = { type: 'json_object' } if response_format.eql?(:json)
response = OpenAI::Client.new.chat(parameters:)
- response.dig('choices', 0, 'message', 'content').strip
+ if response_format.nil?
+ response.dig('choices', 0, 'message', 'content').strip
+ else
+ response
+ end
end
def self.models = OpenAI::Client.new.models.list
def self.select_model(model)
\ No newline at end of file