From b6045ef221fc894e6007cbf6674a81e9833565f4 Mon Sep 17 00:00:00 2001
From: Eric Pugh
Date: Tue, 14 Jan 2025 13:59:13 -0500
Subject: [PATCH] First test

---
 app/services/llm_service.rb       | 47 +++++++++++++++++++++++++++++++
 test/services/llm_service_test.rb | 33 ++++++++++++++++++++++
 2 files changed, 80 insertions(+)
 create mode 100644 test/services/llm_service_test.rb

diff --git a/app/services/llm_service.rb b/app/services/llm_service.rb
index dcebf3b2a..d05d2bdab 100644
--- a/app/services/llm_service.rb
+++ b/app/services/llm_service.rb
@@ -1,14 +1,61 @@
 # frozen_string_literal: true
 
+require 'net/http'
+require 'json'
+
 class LlmService
   def initialize openai_key, _opts = {}
     @openai_key = openai_key
   end
 
   def make_judgement _system_prompt, _user_prompt
+    # scott write code.
     {
       explanation: 'Hi scott',
       rating: rand(4),
     }
   end
+
+  # rubocop:disable Metrics/MethodLength
+  def get_llm_response user_prompt, system_prompt
+    uri = URI('https://api.openai.com/v1/chat/completions')
+    headers = {
+      'Content-Type' => 'application/json',
+      'Authorization' => "Bearer #{@openai_key}",
+    }
+    body = {
+      model: 'gpt-4',
+      messages: [
+        { role: 'system', content: system_prompt },
+        { role: 'user', content: user_prompt }
+      ],
+    }
+    response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |http|
+      request = Net::HTTP::Post.new(uri, headers)
+      request.body = body.to_json
+      http.request(request)
+    end
+    if response.is_a?(Net::HTTPSuccess)
+      json_response = JSON.parse(response.body)
+      # puts json_response
+      content = json_response['choices']&.first&.dig('message', 'content')
+      # puts content
+      parsed_content = begin
+        JSON.parse(content)
+      rescue StandardError
+        {}
+      end
+
+      # puts "here is parsed"
+      # puts parsed_content
+
+      {
+        explanation: parsed_content['response']['explanation'],
+        judgment: parsed_content['response']['judgment_value'],
+      }
+    else
+      raise "Error: #{response.code} - #{response.message}"
+    end
+  end
+  # rubocop:enable Metrics/MethodLength
 end
diff --git a/test/services/llm_service_test.rb b/test/services/llm_service_test.rb
new file mode 100644
index 000000000..8c0261214
--- /dev/null
+++ b/test/services/llm_service_test.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'test_helper'
+
+class LlmServiceTest < ActiveSupport::TestCase
+  let(:user) { users(:judge_judy) }
+  let(:service) { LlmService.new user.openai_key, {} }
+
+
+  let(:score_data) do
+    {
+      all_rated: [ true, false ].sample,
+      queries: {},
+      score: (1..100).to_a.sample,
+      try_number: the_try.try_number,
+      user_id: user.id,
+    }
+  end
+
+  describe 'Hacking with Scott' do
+    test 'can we make it run' do
+      WebMock.allow_net_connect!
+      user_prompt = 'Explain why you chose a judgment of 3.'
+      system_prompt = 'Provide a JSON response with an explanation and a judgment value.'
+      result = service.get_llm_response(user_prompt, system_prompt)
+      puts result
+
+      assert_equal 3, result[:judgment]
+
+      WebMock.disable_net_connect!
+    end
+  end
+end
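
Note on the parsing contract (a sketch of the assumption the new code makes, not part of the patch): get_llm_response reads parsed_content['response']['explanation'] and parsed_content['response']['judgment_value'], so the system prompt has to steer the model into returning a JSON body shaped roughly like the following. The field values here are illustrative only:

    {
      "response": {
        "explanation": "Why the judge picked this grade.",
        "judgment_value": 3
      }
    }

If the model replies with anything that is not valid JSON, the begin/rescue falls back to an empty hash and the subsequent ['response'] lookup raises NoMethodError; a follow-up change could use parsed_content.dig('response', 'explanation') to fail more gracefully.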