Remove stale code

This commit is contained in:
Zach Gollwitzer
2025-03-24 09:25:49 -04:00
parent 2b23ecae31
commit 8fcb7b7dc7
8 changed files with 0 additions and 344 deletions

View File

@@ -1,63 +0,0 @@
import { Controller } from "@hotwired/stimulus"
// Connects to data-controller="ai-query"
export default class extends Controller {
  static targets = ["input", "output", "form", "submit", "spinner"]

  // Show the placeholder prompt as soon as the controller attaches.
  connect() {
    this.resetOutput()
  }

  // Submit the user's question to the form's endpoint and render the reply.
  async query(event) {
    event.preventDefault()

    const question = this.inputTarget.value.trim()
    if (!question) return

    this.setBusy(true)
    this.resetOutput()

    try {
      const csrfToken = document.querySelector("meta[name='csrf-token']").content
      const response = await fetch(this.formTarget.action, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "X-CSRF-Token": csrfToken
        },
        body: JSON.stringify({ query: question })
      })
      const data = await response.json()

      // Server signals success/failure in the JSON body, not the HTTP status.
      this.outputTarget.textContent = data.success
        ? data.response
        : `Error: ${data.response || "Something went wrong. Please try again."}`
    } catch (error) {
      console.error("AI Query error:", error)
      this.outputTarget.textContent = "Error: Could not process your request. Please try again."
    } finally {
      this.setBusy(false)
    }
  }

  // Restore the idle placeholder text in the output area.
  resetOutput() {
    this.outputTarget.textContent = "Ask any question about your finances..."
  }

  startLoading() {
    this.setBusy(true)
  }

  stopLoading() {
    this.setBusy(false)
  }

  // Disable the submit button and show/hide the optional spinner together.
  setBusy(busy) {
    this.submitTarget.disabled = busy
    if (this.hasSpinnerTarget) {
      this.spinnerTarget.classList.toggle("hidden", !busy)
    }
  }
}

View File

@@ -1,145 +0,0 @@
# Background job that answers a user's chat message with an AI-generated
# response, streaming progress and the final reply to the chat UI via
# Turbo Streams. The owning Chat is derived from the message itself.
class ProcessAiResponseJob < ApplicationJob
  queue_as :default

  # message - the user's Message record to respond to.
  def perform(message)
    chat = message.chat
    user_message = message

    # Debug mode: log the start of processing (no-op unless debug is enabled).
    Ai::DebugMode.log_to_chat(chat, "🐞 DEBUG: Starting to process user query")

    # Name the chat after the first user message (truncated for display).
    # NOTE(review): uses update (not update!) — a failed title save is
    # silently ignored; confirm that is intentional.
    if chat.title == "New Chat" && chat.messages.where(role: "user").count == 1
      new_title = user_message.content.truncate(30)
      chat.update(title: new_title)
    end

    # Processing steps with progress updates. The sleeps are deliberate
    # pacing so each progress message is visible to the user.
    begin
      # Step 1: Preparing request
      update_thinking_indicator(chat, "Preparing request...")
      sleep(0.5) # Small delay to show the progress

      # Step 2: Analyzing query
      update_thinking_indicator(chat, "Analyzing your question...")
      sleep(0.5) # Small delay to show the progress

      # Step 3: Generating response (the real work happens here)
      update_thinking_indicator(chat, "Generating response...")
      response_content = generate_response(chat, user_message.content)

      # Step 4: Finalizing
      update_thinking_indicator(chat, "Finalizing response...")
      sleep(0.5) # Small delay to show the progress

      # Persist the assistant's reply...
      ai_response = chat.messages.create!(
        role: "assistant",
        content: response_content
      )

      # ...and append it to the chat's message list in the UI.
      Turbo::StreamsChannel.broadcast_append_to(
        chat,
        target: "messages",
        partial: "messages/message",
        locals: { message: ai_response }
      )
    rescue => e
      Rails.logger.error("Error in ProcessAiResponseJob: #{e.message}")
      Rails.logger.error(e.backtrace.join("\n"))

      # Surface a generic apology in the chat rather than failing silently.
      error_message = chat.messages.create!(
        role: "assistant",
        content: "I'm sorry, I encountered an error while processing your request. Please try again later."
      )

      # Broadcast the error message just like a normal reply.
      Turbo::StreamsChannel.broadcast_append_to(
        chat,
        target: "messages",
        partial: "messages/message",
        locals: { message: error_message }
      )
    ensure
      # Hide the thinking indicator — replace (not update) so the element
      # itself is swapped for an empty hidden placeholder.
      Turbo::StreamsChannel.broadcast_replace_to(
        chat,
        target: "thinking",
        html: '<div id="thinking" class="hidden"></div>'
      )

      # Reset the form so the user can ask the next question.
      Turbo::StreamsChannel.broadcast_replace_to(
        chat,
        target: "message_form",
        partial: "messages/form",
        locals: { chat: chat, message: Message.new, scroll_behavior: true }
      )
    end

    # Debug mode: log completion (runs on both the success and the
    # handled-error path, since the rescue above swallows the exception).
    Ai::DebugMode.log_to_chat(chat, "🐞 DEBUG: Processing completed")
  end

  private

  # Swap the text inside the "thinking" indicator with a progress update.
  def update_thinking_indicator(chat, message)
    Turbo::StreamsChannel.broadcast_replace_to(
      chat,
      target: "thinking-message",
      partial: "messages/thinking_message",
      locals: { message: message }
    )
  end

  # Ask the family's FinancialAssistant to answer user_message (a String).
  # Returns the response text, or a canned apology if anything raises.
  def generate_response(chat, user_message)
    begin
      # NOTE(review): fetched but never used in this method — presumably the
      # assistant reads system context from chat.messages itself; confirm.
      system_message = chat.messages.find_by(role: "system")&.content

      # The assistant is scoped to the user's family and bound to this chat.
      family = chat.user.family
      financial_assistant = Ai::FinancialAssistant.new(family).with_chat(chat)

      # Debug mode: record which family's data the answer is based on.
      Ai::DebugMode.log_to_chat(
        chat,
        "🐞 DEBUG: Using family data",
        {
          family_id: family.id,
          currency: family.currency
        }
      )

      # Pass the chat history so the assistant can answer in context.
      response = financial_assistant.query(user_message, chat.messages)
      response
    rescue => e
      error_message = "Error generating AI response: #{e.message}"
      Rails.logger.error(error_message)
      Rails.logger.error(e.backtrace.join("\n"))

      # Debug mode: log a trimmed error — full messages/backtraces can
      # exceed broadcast payload size limits.
      truncated_message = e.message.to_s[0...1000]
      truncated_backtrace = e.backtrace.first(5)
      Ai::DebugMode.log_to_chat(
        chat,
        "🐞 DEBUG: Error encountered",
        {
          error: truncated_message,
          backtrace: truncated_backtrace
        }
      )

      # Fall back to a user-facing apology rather than propagating the error.
      "I'm sorry, I encountered an error while processing your request. Please try again later."
    end
  end
end

View File

@@ -1,46 +1,6 @@
module Chat::Debuggable
extend ActiveSupport::Concern
class_methods do
# True only when the AI_DEBUG_MODE environment flag is explicitly "true".
def debug_mode_enabled?
  ENV.fetch("AI_DEBUG_MODE", nil) == "true"
end
# Log debug information to a chat as a hidden "developer" message.
#
# chat    - the Chat receiving the debug entry
# message - String headline for the entry
# data    - optional payload rendered as a fenced JSON block
#
# No-ops unless debug mode is enabled. Payloads are trimmed so the
# resulting broadcast stays under PostgreSQL's ~8000-byte NOTIFY limit.
def log_to_chat(chat, message, data = nil)
  # FIX: the guard previously called `enabled?`, which is not defined here;
  # the predicate defined on this module is `debug_mode_enabled?`.
  return unless debug_mode_enabled?

  # Store debug messages in the database but don't output to chat
  content = message
  if data.present?
    # FIX: work on a shallow copy so the caller's hash is not mutated
    # when we trim the backtrace below.
    data = data.dup if data.is_a?(Hash)

    # Limit backtrace to first 3 entries to reduce payload size
    if data.is_a?(Hash) && data[:backtrace].is_a?(Array)
      data[:backtrace] = data[:backtrace].first(3)
    end

    # Convert to JSON and check size
    json_data = JSON.pretty_generate(data)

    # If still too large, truncate it (PostgreSQL NOTIFY has ~8000 byte limit)
    if json_data.bytesize > 7000
      json_data = json_data[0...7000] + "\n... (truncated due to size limits)"
    end

    content += "\n\n```json\n#{json_data}\n```"
  end

  chat.messages.create!(
    role: "developer",
    content: content,
  )
end
end
# Instance-level convenience: delegates to the class-level predicate so
# instances can ask debug_mode_enabled? directly.
def debug_mode_enabled?
  self.class.debug_mode_enabled?
end
# Shorter-named predicate kept for existing callers.
# FIX: delegates to debug_mode_enabled? instead of re-reading the env var,
# so the AI_DEBUG_MODE check is defined in exactly one place.
def debug_mode?
  debug_mode_enabled?
end

View File

@@ -1,7 +0,0 @@
require "test_helper"
# Auto-generated scaffold for AssistantResponseJob — no behavior is
# covered yet; the commented example below is the generator's placeholder.
class AssistantResponseJobTest < ActiveJob::TestCase
  # test "the truth" do
  #   assert true
  # end
end

View File

@@ -1,7 +0,0 @@
require "test_helper"
# Auto-generated scaffold for EnrichDataJob — no behavior is covered yet;
# the commented example below is the generator's placeholder.
class EnrichDataJobTest < ActiveJob::TestCase
  # test "the truth" do
  #   assert true
  # end
end

View File

@@ -1,68 +0,0 @@
require "test_helper"
# Tests for ProcessAiResponseJob: the happy path (assistant reply persisted)
# and the error path (canned apology persisted).
class ProcessAiResponseJobTest < ActiveJob::TestCase
  test "generates a response using FinancialAssistant" do
    # Create a test user and chat
    user = users(:family_admin)
    chat = Chat.create!(user: user, title: "Test Chat")

    # Seed a contextual instruction message (stored with role "developer").
    chat.messages.create!(
      role: "developer",
      content: "You are a helpful financial assistant.",
    )

    # Create a user message
    user_message = chat.messages.create!(
      role: "user",
      content: "What is my net worth?",
    )

    # FIX: the job instantiates Ai::FinancialAssistant, so the namespaced
    # constant must be stubbed — an expectation on bare FinancialAssistant
    # could never match.
    mock_assistant = mock
    mock_assistant.expects(:with_chat).with(chat).returns(mock_assistant)
    mock_assistant.expects(:query).with("What is my net worth?", chat.messages).returns("Your net worth is $100,000.")
    Ai::FinancialAssistant.expects(:new).with(user.family).returns(mock_assistant)

    # FIX: perform(message) takes the Message record itself, not
    # (chat_id, message_id).
    assert_difference "Message.count", 1 do
      ProcessAiResponseJob.perform_now(user_message)
    end

    # Check the created message (should be the only assistant message)
    response_message = chat.messages.where(role: "assistant").last
    assert_not_nil response_message
    assert_equal "assistant", response_message.role
    assert_equal "Your net worth is $100,000.", response_message.content
    assert_equal chat, response_message.chat
  end

  test "handles errors gracefully" do
    # Create a test user and chat
    user = users(:family_admin)
    chat = Chat.create!(user: user, title: "Test Chat")

    # Create a user message
    user_message = chat.messages.create!(
      role: "user",
      content: "What is my net worth?",
    )

    # Mock the assistant (namespaced — see above) to raise an error.
    mock_assistant = mock
    mock_assistant.expects(:with_chat).with(chat).returns(mock_assistant)
    mock_assistant.expects(:query).raises(StandardError.new("Test error"))
    Ai::FinancialAssistant.expects(:new).with(user.family).returns(mock_assistant)

    # Run the job with the Message record (see signature fix above).
    assert_difference "Message.count", 1 do
      ProcessAiResponseJob.perform_now(user_message)
    end

    # The only assistant message should contain the canned apology.
    # FIX: parenthesized regex argument avoids Ruby's ambiguity warning.
    response_message = chat.messages.where(role: "assistant").last
    assert_not_nil response_message
    assert_equal "assistant", response_message.role
    assert_match(/I'm sorry, I encountered an error/, response_message.content)
  end
end

View File

@@ -1,7 +0,0 @@
require "test_helper"
# Auto-generated scaffold for RevertImportJob — no behavior is covered yet;
# the commented example below is the generator's placeholder.
class RevertImportJobTest < ActiveJob::TestCase
  # test "the truth" do
  #   assert true
  # end
end

View File

@@ -1,7 +0,0 @@
require "test_helper"
# Auto-generated scaffold for UserPurgeJob — no behavior is covered yet;
# the commented example below is the generator's placeholder.
class UserPurgeJobTest < ActiveJob::TestCase
  # test "the truth" do
  #   assert true
  # end
end