lib/gen_ai/language/gemini.rb in gen-ai-0.4.0.alpha.2 vs lib/gen_ai/language/gemini.rb in gen-ai-0.4.0.alpha.3
- old
+ new
@@ -17,10 +17,10 @@
# Single-prompt completion — intentionally left unimplemented (no-op stub).
# NOTE(review): presumably Gemini chat-only support at this version; confirm.
def complete(prompt, options = {}); end
# Sends a multi-turn chat request to the Gemini `generateContent` endpoint.
#
# @param messages [Array<Hash>] conversation turns in Gemini's `contents`
#   shape; keys are deep-symbolized before sending
# @param options [Hash] generation settings forwarded as `generationConfig`;
#   the :model key is stripped since the model is fixed in the URL path
# @return the value of #build_result (project result object — raw response
#   plus completions extracted via #extract_completions)
def chat(messages, options = {})
  response = client.post "/v1beta/models/gemini-pro:generateContent?key=#{@token}", {
    # Non-destructive deep_symbolize_keys: the bang variant mutated the
    # caller's message hashes as a hidden side effect of this call.
    contents: messages.map(&:deep_symbolize_keys),
    generationConfig: options.except(:model)
  }
  build_result(model: 'gemini-pro', raw: response, parsed: extract_completions(response))
end