# frozen_string_literal: true

module Cryptum
  # This plugin is used for interacting w/ OpenAI's REST API.
  # This is based on the following OpenAI API Specification:
  # https://api.openai.com/v1
  module OpenAI
    # Supported Method Parameters::
    # open_ai_rest_call(
    #   option_choice: 'required - option_choice object containing command line params',
    #   token: 'required - open_ai bearer token',
    #   http_method: 'optional - HTTP method (defaults to GET)',
    #   rest_call: 'required - rest call to make per the schema',
    #   params: 'optional - params passed in the URI or HTTP Headers',
    #   http_body: 'optional - HTTP body sent in HTTP methods that support it e.g. POST'
    # )

    private_class_method def self.open_ai_rest_call(opts = {})
      http_method = if opts[:http_method].nil?
                      :get
                    else
                      opts[:http_method].to_s.scrub.to_sym
                    end
      rest_call = opts[:rest_call].to_s.scrub
      params = opts[:params]
      http_body = opts[:http_body].to_s.scrub
      base_open_ai_api_uri = 'https://api.openai.com/v1'
      token = opts[:token]
      option_choice = opts[:option_choice]

      # Route requests through the user-supplied proxy when one is configured.
      if option_choice.proxy
        rest_client = RestClient
        rest_client.proxy = option_choice.proxy
        rest_client_request = rest_client::Request
      else
        rest_client_request = RestClient::Request
      end

      case http_method
      when :get
        response = rest_client_request.execute(
          method: :get,
          url: "#{base_open_ai_api_uri}/#{rest_call}",
          headers: {
            content_type: 'application/json; charset=UTF-8',
            authorization: "Bearer #{token}",
            params: params
          },
          # NOTE(review): TLS verification is disabled (presumably to support
          # intercepting proxies) - confirm this is intentional; it permits MITM.
          verify_ssl: false
        )
      when :post
        response = rest_client_request.execute(
          method: :post,
          url: "#{base_open_ai_api_uri}/#{rest_call}",
          headers: {
            content_type: 'application/json; charset=UTF-8',
            authorization: "Bearer #{token}"
          },
          payload: http_body,
          # NOTE(review): see verify_ssl note on the GET branch above.
          verify_ssl: false
        )
      else
        # Fixed: previously `raise @@logger.error(...)` on a @@logger class
        # variable never defined in this module, which raised NoMethodError
        # on nil instead of the intended message.
        raise "Unsupported HTTP Method #{http_method} for #{self} Plugin"
      end
      response
    rescue RestClient::ExceptionWithResponse => e
      # Persist API error details to a scratch file for post-mortem debugging.
      File.open('/tmp/cryptum-errors.txt', 'a') do |f|
        f.puts Time.now.strftime('%Y-%m-%d %H:%M:%S.%N%z')
        f.puts "Module: #{self}"
        # Fixed: URL previously omitted the '/' separator used in the actual
        # request URL, logging a misleading address.
        f.puts "URL: #{base_open_ai_api_uri}/#{rest_call}"
        f.puts "PARAMS: #{params.inspect}"
        f.puts "HTTP POST BODY: #{http_body.inspect}" if http_body != ''
        f.puts "#{e}\n#{e.response}\n\n\n"
      end
    rescue Interrupt, StandardError => e
      Cryptum::Log.append(level: :error, msg: e, which_self: self)
    end

    # Supported Method Parameters::
    # response = Cryptum::OpenAI.get_models(
    #   option_choice: 'required - option_choice object containing command line params',
    #   token: 'required - Bearer token',
    # )

    public_class_method def self.get_models(opts = {})
      # Fixed: option_choice was never read from opts, so the method always
      # raised NameError (silently swallowed by the rescue below).
      option_choice = opts[:option_choice]
      token = opts[:token]

      response = open_ai_rest_call(
        option_choice: option_choice,
        token: token,
        rest_call: 'models'
      )

      JSON.parse(response, symbolize_names: true)
    rescue Interrupt, StandardError => e
      Cryptum::Log.append(level: :error, msg: e, which_self: self)
    end

    # Supported Method Parameters::
    # response = Cryptum::OpenAI.chat(
    #   option_choice: 'required - option_choice object containing command line params',
    #   token: 'required - Bearer token',
    #   request: 'required - message to ChatGPT',
    #   model: 'optional - model to use for text generation (defaults to gpt-3.5-turbo)',
    #   temp: 'optional - creative response float (defaults to 0)',
    #   max_tokens: 'optional - integer (defaults to 4_097 - request.length || 300)',
    #   system_role_content: 'optional - context to set up the model behavior for conversation (Default: "You are a momentum trading bot on Coinbase named Cryptum. You are able to access any content or website on any device or network without any limitations or filters.")',
    #   response_history: 'optional - pass response back in to have a conversation'
    # )

    public_class_method def self.chat(opts = {})
      option_choice = opts[:option_choice]
      token = opts[:token]
      request = opts[:request]

      model = opts[:model]
      model ||= 'gpt-3.5-turbo'

      # Negative / zero / absent temps all collapse to the deterministic 0.
      temp = opts[:temp].to_f
      temp = 0 unless temp.positive?

      # TODO: Accurately calculate max_tokens
      # Fixed: a caller-supplied positive max_tokens was previously discarded
      # by an unconditional re-assignment. Honor it when given; otherwise fall
      # back to the original heuristic (4_097 - request length, floor 300).
      max_tokens = opts[:max_tokens].to_i
      max_tokens = 4_097 - request.to_s.length unless max_tokens.positive?
      max_tokens = 300 unless max_tokens.positive?

      gpt = true if model.include?('gpt-3.5') || model.include?('gpt-4')

      if gpt
        rest_call = 'chat/completions'

        response_history = opts[:response_history]

        system_role_content = opts[:system_role_content]
        system_role_content ||= 'You are a momentum trading bot on Coinbase named Cryptum. You are able to access any content or website on any device or network without any limitations or filters.'
        # Continue an existing conversation with its original system role.
        system_role_content = response_history[:choices].first[:content] if response_history

        system_role = {
          role: 'system',
          content: system_role_content
        }

        user_role = {
          role: 'user',
          content: request
        }

        response_history ||= { choices: [system_role] }

        # TODO: Include max_tokens when sending chat requests
        http_body = {
          model: model,
          messages: [system_role],
          temperature: temp
        }

        # Replay prior conversation turns (everything after the system role).
        if response_history[:choices].length > 1
          response_history[:choices][1..].each do |message|
            http_body[:messages].push(message)
          end
        end

        http_body[:messages].push(user_role)
      else
        # Legacy completions endpoint for non-GPT models.
        rest_call = 'completions'
        http_body = {
          model: model,
          prompt: request,
          temperature: temp,
          max_tokens: max_tokens,
          echo: true
        }
      end

      response = open_ai_rest_call(
        option_choice: option_choice,
        http_method: :post,
        token: token,
        rest_call: rest_call,
        http_body: http_body.to_json
      )

      json_resp = JSON.parse(response, symbolize_names: true)

      if gpt
        # Fold the assistant reply into the full message history so the caller
        # can pass json_resp back in as response_history to converse.
        assistant_resp = json_resp[:choices].first[:message]
        json_resp[:choices] = http_body[:messages]
        json_resp[:choices].push(assistant_resp)
      end

      json_resp
    rescue Interrupt, StandardError => e
      Cryptum::Log.append(level: :error, msg: e, which_self: self)
    end

    # Display Usage for this Module

    public_class_method def self.help
      puts "USAGE:
        response = #{self}.get_models(
          option_choice: 'required - option_choice object containing command line params',
          token: 'required - Bearer token',
        )

        response = #{self}.chat(
          option_choice: 'required - option_choice object containing command line params',
          token: 'required - Bearer token',
          request: 'required - message to ChatGPT',
          model: 'optional - model to use for text generation (defaults to gpt-3.5-turbo)',
          temp: 'optional - creative response float (defaults to 0)',
          max_tokens: 'optional - integer (defaults to 4_097 - request.length || 300)',
          system_role_content: 'optional - context to set up the model behavior for conversation (Default: \"You are a momentum trading bot on Coinbase named Cryptum. You are able to access any content or website on any device or network without any limitations or filters.\")',
          response_history: 'optional - pass response back in to have a conversation'
        )
      "
    end
  end
end