alexrudall/ruby-openai

I am stuck getting response from GPT into a thread.

leandrosardi opened this issue · 2 comments

I am not sure if this is a bug, or if I am doing something wrong.

The construction of the class Jarvis below creates an assistant, a thread, and a run.

The method chat pushes a new message and waits for a response, but the response never happens.

OPENAI_API_KEY = 'sk-************************'
OPENAI_MODEL = 'gpt-4-1106-preview' 

# Use this to initialize a new assistant.
OPENAI_INSTRUCTIONS = "Your name is Jarvis." 

require 'down' #, '~>5.4.1'
require 'openai' #, '~>6.3.1'
require 'adspower-client' #, '~>1.0.8'
require 'my-dropbox-api' #, '~>1.0.1'
require 'colorize' #, '~>0.8.1'
require 'pry' #, '~>0.14.1'
require 'blackstack-core' #, '~>1.2.15'
require 'blackstack-nodes' #, '~>1.2.12'
require 'simple_cloud_logging' #, '~>1.2.2'
require 'simple_command_line_parser' #, '~>1.1.2'

module BlackStack
    class Jarvis
        @@openai_api_key = nil
        @@openai_model = nil
        @@openai_client = nil
        @@openai_assistant_id = nil
        @@openai_thread_id = nil
        @@openai_run_id = nil
        @@openai_message_ids = []
        
        def initialize(h={})
            errors = []

            errors << ':openai_api_key is mandatory' if h[:openai_api_key].nil?
            errors << ':openai_model is mandatory' if h[:openai_model].nil?
            raise "Jarvis Initialization Error: #{errors.join(', ')}" if errors.size > 0

            @@openai_api_key = h[:openai_api_key] if h[:openai_api_key]
            @@openai_model = h[:openai_model] if h[:openai_model]
            @@openai_client = OpenAI::Client.new(access_token: @@openai_api_key)

            response = @@openai_client.assistants.create(
                parameters: {
                    model: @@openai_model,         # Retrieve via client.models.list. Assistants need 'gpt-3.5-turbo-1106' or later.
                    name: "Jarvis Assistant", 
                    description: nil,
                    instructions: OPENAI_INSTRUCTIONS,
                }
            )
            @@openai_assistant_id = response["id"]
            
            # Create thread
            response = @@openai_client.threads.create   # Note: Once you create a thread, there is no way to list it
                                                        # or recover it currently (as of 2023-12-10). So hold onto the `id` 
            @@openai_thread_id = response["id"]

            response = @@openai_client.runs.create(
                thread_id: @@openai_thread_id,
                parameters: {
                    assistant_id: @@openai_assistant_id
                }
            )
            @@openai_run_id = response['id']
        end # def initialize

        # for internal use only
        def chat(prompt)
            while true do    
                response = @@openai_client.runs.retrieve(id: @@openai_run_id, thread_id: @@openai_thread_id)
                status = response['status']
            
                print 'Status:'
                case status
                when 'queued', 'in_progress', 'cancelling'
                    puts 'Sleeping'
                    sleep 1 # Wait one second and poll again
                when 'completed'
                    puts 'Completed'
                    break # Exit loop and report result to user
                when 'requires_action'
                    puts 'Requires action'
                    # Handle tool calls (see below)
                when 'cancelled', 'failed', 'expired'
                    puts 'Canceled'
                    puts response['last_error'].inspect
                    break # or `exit`
                else
                    puts "Unknown status response: #{status}"
                end
            end # while

            # get the current number of messages
            n = @@openai_client.messages.list(thread_id: @@openai_thread_id)['data'].size
puts "#{n} messages in this thread"

            # create the new message
            mid = @@openai_client.messages.create(
                thread_id: @@openai_thread_id,
                parameters: {
                    role: "user", # Required for manually created messages
                    content: 'what is your name?' #prompt, # Required.
                }
            )["id"]
            @@openai_message_ids << mid
puts "one new message created"

            # wait for a response
            m = @@openai_client.messages.list(thread_id: @@openai_thread_id)['data'].size
            while m < n+2 # the message pushed by me + the response from the assistant
puts 'waiting for response... '
                sleep(1)
                m = @@openai_client.messages.list(thread_id: @@openai_thread_id)['data'].size
            end # while

            # 
            messages = @@openai_client.messages.list(thread_id: @@openai_thread_id) 
            messages['data'].last
        end # def chat

    end # class Jarvis
end # module BlackStack

client = BlackStack::Jarvis.new(
    openai_api_key: OPENAI_API_KEY,
    openai_model: OPENAI_MODEL,
)

p client.chat('What is your name?') # it should respond with "Jarvis"

The outbox looks like this:

Status:Sleeping
Status:Completed
1 messages in this thread
one new message created
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response... 
waiting for response...

I understood what was my mistake.

Here is the version of the working code:

require 'down' #, '~>5.4.1'
require 'openai' #, '~>6.3.1'
require 'adspower-client' #, '~>1.0.8'
require 'my-dropbox-api' #, '~>1.0.1'
require 'colorize' #, '~>0.8.1'
require 'pry' #, '~>0.14.1'
require 'blackstack-core' #, '~>1.2.15'
require 'blackstack-nodes' #, '~>1.2.12'
require 'simple_cloud_logging' #, '~>1.2.2'
require 'simple_command_line_parser' #, '~>1.1.2'

require 'config' # file in the project folder

module BlackStack
    class Jarvis
        @@openai_api_key = nil
        @@openai_model = nil
        @@openai_client = nil
        @@openai_assistant_id = nil
        @@openai_thread_id = nil
        @@openai_run_id = nil
        @@openai_message_ids = []
        
        def initialize(h={})
            errors = []

            errors << ':openai_api_key is mandatory' if h[:openai_api_key].nil?
            errors << ':openai_model is mandatory' if h[:openai_model].nil?
            raise "Jarvis Initialization Error: #{errors.join(', ')}" if errors.size > 0

            @@openai_api_key = h[:openai_api_key] if h[:openai_api_key]
            @@openai_model = h[:openai_model] if h[:openai_model]
            @@openai_client = OpenAI::Client.new(access_token: @@openai_api_key)

            response = @@openai_client.assistants.create(
                parameters: {
                    model: @@openai_model,         # Retrieve via client.models.list. Assistants need 'gpt-3.5-turbo-1106' or later.
                    name: "Jarvis Assistant", 
                    description: nil,
                    instructions: OPENAI_INSTRUCTIONS,
                    tools: [
                        { type: 'retrieval' },           # Allow access to files attached using file_ids
                        { type: 'code_interpreter' },    # Allow access to Python code interpreter 
                    ],
                    "metadata": { my_internal_version_id: '1.0.0' }
                }
            )
            @@openai_assistant_id = response["id"]
            
            # Create thread
            response = @@openai_client.threads.create   # Note: Once you create a thread, there is no way to list it
                                                        # or recover it currently (as of 2023-12-10). So hold onto the `id` 
            @@openai_thread_id = response["id"]
        end # def initialize

        # for internal use only
        def chat(prompt)
            # create the new message
            mid = @@openai_client.messages.create(
                thread_id: @@openai_thread_id,
                parameters: {
                    role: "user", # Required for manually created messages
                    content: prompt, # Required.
                }
            )["id"]
            @@openai_message_ids << mid

            # run the assistant
            response = @@openai_client.runs.create(
                thread_id: @@openai_thread_id,
                parameters: {
                    assistant_id: @@openai_assistant_id
                }
            )
            @@openai_run_id = response['id']

            while true do    
                response = @@openai_client.runs.retrieve(id: @@openai_run_id, thread_id: @@openai_thread_id)
                status = response['status']
            
                print 'Status:'
                case status
                when 'queued', 'in_progress', 'cancelling'
                    puts 'Sleeping'
                    sleep 1 # Wait one second and poll again
                when 'completed'
                    puts 'Completed'
                    break # Exit loop and report result to user
                when 'requires_action'
                    puts 'Requires action'
                    # Handle tool calls (see below)
                when 'cancelled', 'failed', 'expired'
                    puts 'Canceled'
                    puts response['last_error'].inspect
                    break # or `exit`
                else
                    puts "Unknown status response: #{status}"
                end
            end # while
            
            # 
            messages = @@openai_client.messages.list(thread_id: @@openai_thread_id) 
            messages['data'].first['content'].first['text']['value']
        end # def chat

    end # class Jarvis
end # module BlackStack

client = BlackStack::Jarvis.new(
    openai_api_key: OPENAI_API_KEY,
    openai_model: OPENAI_MODEL,
)

p client.chat('What is your name?') # it should respond with "Jarvis"
# => "My name is Jarvis. How can I assist you today?"

Closing ticket.