# Source: syncmaster8 / debug_ask_ai_feature.py
# (Hugging Face viewer residue removed: author "aseelflihan", commit "fix", rev fd5c46a)
# debug_ask_ai_feature.py - Debug the specific Ask AI feature
import os
from dotenv import load_dotenv
import traceback
def debug_ask_ai_feature():
    """Debug the Ask AI feature specifically.

    Loads the .env configuration, instantiates the translator and AI
    question engine, replays one representative Ask-AI request, and traces
    whether a stale model reference ("meta-llama-3-70b-instruct") still
    leaks into the pipeline.

    Returns:
        bool: True when the scripted scenario ran to completion,
        False when any exception was raised along the way.
    """
    print("🔍 Debugging Ask AI Feature")
    print("=" * 60)
    # Load environment variables from .env before reading any of them.
    load_dotenv()
    print("📋 Environment Check:")
    print(f" OPENROUTER_MODEL: {os.getenv('OPENROUTER_MODEL')}")
    # BUGFIX: slicing os.getenv(...) directly raised TypeError when the key
    # was unset, aborting the debug run before any diagnostics printed.
    api_key = os.getenv('OPENROUTER_API_KEY')
    print(f" OPENROUTER_API_KEY: {api_key[:20] + '...' if api_key else 'NOT SET'}")
    try:
        # Import the project modules under test (inside try so a broken
        # install is reported through the same diagnostic path).
        from translator import get_translator
        from ai_questions import get_ai_question_engine
        print("\n🔧 Getting instances...")
        translator = get_translator()
        question_engine = get_ai_question_engine()
        print(f"📋 Translator model: {translator.openrouter_model}")
        print(f"📋 Engine translator model: {question_engine.translator.openrouter_model}")
        # Reproduce the exact scenario reported from the UI.
        print("\n🧪 Testing Ask AI scenario...")
        selected_text = "Hello, this is and this is a test. I just that I want to see"
        question = "اشرح هذا النص بالتفصيل"
        segment_info = {
            'id': 'test_segment',
            'start_ms': 0,
            'end_ms': 0
        }
        print(f"📝 Selected text: {selected_text}")
        print(f"❓ Question: {question}")
        print(f"🎯 Preferred model: OpenRouter AI")
        # Call process_question with the OpenRouter AI backend explicitly.
        result = question_engine.process_question(
            selected_text=selected_text,
            question=question,
            segment_info=segment_info,
            ui_language='ar',
            session_id=None,
            preferred_model='OpenRouter AI'
        )
        # Older engine versions returned a 3-tuple without the model name.
        if len(result) == 4:
            answer, error, session_id, model_used = result
        else:
            answer, error, session_id = result
            model_used = "Unknown"
        print(f"\n📊 Results:")
        print(f" Answer: {'✅ Success' if answer else '❌ Failed'}")
        print(f" Error: {error}")
        print(f" Model used: {model_used}")
        print(f" Session ID: {session_id}")
        if answer:
            print(f" Answer preview: {answer[:100]}...")
        if error and "meta-llama-3-70b-instruct" in str(error):
            print("\n🚨 FOUND THE PROBLEM!")
            print("The old model is being used somewhere in the process")
            # Narrow down where the stale model name originates.
            print("\n🔍 Tracing the error source...")
            # Probe the translator's low-level completion helper directly.
            print("Testing _openrouter_complete directly...")
            test_prompt = "Test prompt"
            response, error = translator._openrouter_complete(test_prompt)
            if error and "meta-llama-3-70b-instruct" in str(error):
                print("🎯 Error is coming from _openrouter_complete method!")
                print(f"Error: {error}")
            else:
                print("✅ _openrouter_complete works fine")
                print(f"Response: {response[:50] if response else 'None'}...")
        return True
    except Exception as e:
        # Deliberately broad: this is a diagnostic tool, so every failure is
        # reported rather than propagated.
        print(f"💥 Exception: {str(e)}")
        print(f"📋 Traceback: {traceback.format_exc()}")
        if "meta-llama-3-70b-instruct" in str(e):
            print("\n🚨 FOUND THE PROBLEM IN EXCEPTION!")
            print("The old model reference is in the exception")
        return False
def test_openrouter_direct_call():
    """Exercise the translator's low-level OpenRouter completion path.

    Sends a single minimal prompt through ``_openrouter_complete`` and, on
    failure, inspects the method's source for a stale hardcoded model name.

    Returns:
        bool: True when a response came back, False on error or exception.
    """
    print("\n🔍 Testing OpenRouter Direct Call")
    print("=" * 60)
    try:
        from translator import get_translator
        client = get_translator()
        print(f"📋 Translator model: {client.openrouter_model}")
        print(f"🔑 Has API key: {'Yes' if client.openrouter_api_key else 'No'}")
        # One tiny prompt straight at the low-level helper.
        prompt = "Hello, respond with 'Test successful' in Arabic."
        print(f"📝 Test prompt: {prompt}")
        print("🚀 Calling _openrouter_complete...")
        reply, failure = client._openrouter_complete(prompt)
        print(f"📊 Results:")
        print(f" Response: {'✅ Success' if reply else '❌ Failed'}")
        print(f" Error: {failure}")
        if reply:
            print(f" Response text: {reply}")
        if failure:
            print(f" Error details: {failure}")
            if "meta-llama-3-70b-instruct" in str(failure):
                print("\n🚨 OLD MODEL FOUND IN ERROR!")
                print("This means the old model is hardcoded somewhere")
                # Check whether the stale name is baked into the code itself.
                print("\n🔍 Checking candidates list in translator...")
                # Read the helper's source to look for the hardcoded model.
                import inspect
                source = inspect.getsource(client._openrouter_complete)
                if "meta-llama-3-70b-instruct" in source:
                    print("🎯 OLD MODEL FOUND IN SOURCE CODE!")
                else:
                    print("✅ No old model in source code")
        return reply is not None
    except Exception as e:
        # Diagnostic tool: report every failure instead of propagating it.
        print(f"💥 Exception: {str(e)}")
        print(f"📋 Traceback: {traceback.format_exc()}")
        return False
def main():
    """Entry point: run both diagnostics and print a pass/fail summary."""
    print("🚀 Ask AI Feature Debug Tool")
    print("=" * 60)
    # Run the two probes in order; each returns a pass/fail boolean.
    ask_ai_ok = debug_ask_ai_feature()
    direct_ok = test_openrouter_direct_call()
    divider = "=" * 60
    print("\n" + divider)
    print("📊 Debug Results:")
    for label, ok in (("Ask AI Feature", ask_ai_ok), ("OpenRouter Direct", direct_ok)):
        print(f" {label}: {'✅ PASS' if ok else '❌ FAIL'}")
    if not (ask_ai_ok and direct_ok):
        # At least one probe failed — suggest the usual remediation steps.
        print("\n💡 Next steps:")
        for hint in (
            " 1. Check for hardcoded model references",
            " 2. Clear all caches and restart",
            " 3. Check for environment variable conflicts",
        ):
            print(hint)
# Run both diagnostics only when executed as a script (not on import).
if __name__ == "__main__":
    main()