Class: Raif::Conversation

Constant Summary

Constants included from Raif::Concerns::LlmResponseParsing

Raif::Concerns::LlmResponseParsing::ASCII_CONTROL_CHARS

Instance Method Summary collapse

Methods included from Raif::Concerns::LlmResponseParsing

#parse_html_response, #parse_json_response, #parsed_response

Methods included from Raif::Concerns::HasAvailableModelTools

#available_model_tools_map

Methods included from Raif::Concerns::HasRequestedLanguage

#requested_language_name, #system_prompt_language_preference

Methods included from Raif::Concerns::HasLlm

#default_llm_model_key, #llm

Methods inherited from ApplicationRecord

table_name_prefix

Instance Method Details

#available_user_tool_classes ⇒ Object



84
85
86
# File 'app/models/raif/conversation.rb', line 84

# Resolves the configured user tool names into their concrete classes.
#
# @return [Array<Class>] one class per name in +available_user_tools+
def available_user_tool_classes
  available_user_tools.map { |tool_name| tool_name.constantize }
end

#build_system_prompt ⇒ Object



20
21
22
23
24
25
# File 'app/models/raif/conversation.rb', line 20

# Assembles the system prompt from the intro and the (possibly blank)
# language preference, trimming surrounding whitespace.
#
# @return [String] the system prompt used for chat requests
def build_system_prompt
  [system_prompt_intro, system_prompt_language_preference].join("\n").strip
end

#initial_chat_message ⇒ Object

i18n-tasks-use t('raif.conversation.initial_chat_message')



33
34
35
# File 'app/models/raif/conversation.rb', line 33

# Looks up this conversation's opening message from the i18n dictionary,
# deriving the translation scope from the class name
# (e.g. Raif::Conversation => "raif.conversation.initial_chat_message").
#
# @return [String] the localized initial chat message
def initial_chat_message
  i18n_scope = self.class.name.underscore.tr("/", ".")
  I18n.t("#{i18n_scope}.initial_chat_message")
end

#llm_messages ⇒ Object



67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
# File 'app/models/raif/conversation.rb', line 67

# Flattens the conversation history into the LLM chat message format.
#
# Entries are walked oldest first; each contributes the user message (when
# present), and — only once the entry has completed — the assistant's
# response plus one assistant message per tool invocation (and its result,
# when one exists).
#
# @return [Array<Hash>] messages shaped as { "role" => ..., "content" => ... }
def llm_messages
  entries.oldest_first.includes(:raif_model_tool_invocations).each_with_object([]) do |entry, msgs|
    msgs << { "role" => "user", "content" => entry.user_message } if entry.user_message.present?
    next unless entry.completed?

    msgs << { "role" => "assistant", "content" => entry.model_response_message } if entry.model_response_message.present?
    entry.raif_model_tool_invocations.each do |invocation|
      msgs << { "role" => "assistant", "content" => invocation.as_llm_message }
      result = invocation.result_llm_message
      msgs << { "role" => "assistant", "content" => result } if result.present?
    end
  end
end

#process_model_response_message(message:, entry:) ⇒ Object



61
62
63
64
65
# File 'app/models/raif/conversation.rb', line 61

# Hook for subclasses to transform the model's response message before it
# is used. The base implementation is an identity passthrough.
#
# @param message [Object] the message produced by the model
# @param entry [Object] the conversation entry being processed (unused here)
# @return [Object] the message, unchanged
def process_model_response_message(message:, entry:)
  message
end

#prompt_model_for_entry_response(entry:, &block) ⇒ Object



37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# File 'app/models/raif/conversation.rb', line 37

# Refreshes the stored system prompt, then asks the LLM to respond to the
# given conversation entry. A streaming block, when given, is forwarded to
# Llm#chat.
#
# On any StandardError the entry is marked failed, the error is logged, and
# (when Airbrake is loaded) a notice is reported; the exception is NOT
# re-raised.
#
# @param entry [Object] the conversation entry to respond to
# @param block [Proc] optional streaming callback forwarded to Llm#chat
# @return [Object] the Llm#chat result, or nil-ish on the error path
def prompt_model_for_entry_response(entry:, &block)
  update(system_prompt: build_system_prompt)

  llm.chat(
    messages: llm_messages,
    source: entry,
    response_format: response_format.to_sym,
    system_prompt: system_prompt,
    available_model_tools: available_model_tools,
    &block
  )
rescue StandardError => e
  # Log the exception class and backtrace (not just the message) so
  # failures are diagnosable from the logs alone.
  Rails.logger.error("Error processing conversation entry ##{entry.id}. #{e.class}: #{e.message}")
  Rails.logger.error(e.backtrace.join("\n")) if e.backtrace
  entry.failed!

  if defined?(Airbrake)
    notice = Airbrake.build_notice(e)
    notice[:context][:component] = "raif_conversation"
    notice[:context][:action] = "prompt_model_for_entry_response"

    Airbrake.notify(notice)
  end
end

#system_prompt_intro ⇒ Object



27
28
29
30
# File 'app/models/raif/conversation.rb', line 27

# Returns the configured system prompt intro. The configuration value may
# be a static string or a callable that receives this conversation.
#
# @return [Object] the intro text (or whatever the callable returns)
def system_prompt_intro
  configured = Raif.config.conversation_system_prompt_intro
  return configured.call(self) if configured.respond_to?(:call)

  configured
end