# debug_ask_ai_feature.py - Debug the specific Ask AI feature
import os
from dotenv import load_dotenv
import traceback
def debug_ask_ai_feature():
    """Debug the Ask AI feature end to end.

    Runs a representative ``process_question`` call against the question
    engine and traces whether the stale ``meta-llama-3-70b-instruct`` model
    name leaks into the result or any raised exception.

    Returns:
        bool: True when the scenario ran without raising, False otherwise.
    """
    print("Debugging Ask AI Feature")
    print("=" * 60)
    try:
        # Everything lives in the try so a broken environment or install is
        # reported with a traceback instead of crashing the debug tool.
        load_dotenv()
        print("Environment Check:")
        print(f"  OPENROUTER_MODEL: {os.getenv('OPENROUTER_MODEL')}")
        # Guard against a missing key: slicing None raises TypeError.
        api_key = os.getenv('OPENROUTER_API_KEY')
        print(f"  OPENROUTER_API_KEY: {api_key[:20] + '...' if api_key else 'NOT SET'}")

        from translator import get_translator
        from ai_questions import get_ai_question_engine

        print("\nGetting instances...")
        translator = get_translator()
        question_engine = get_ai_question_engine()
        print(f"Translator model: {translator.openrouter_model}")
        print(f"Engine translator model: {question_engine.translator.openrouter_model}")

        # Reproduce the exact scenario reported from the UI.
        print("\nTesting Ask AI scenario...")
        selected_text = "Hello, this is and this is a test. I just that I want to see"
        # Arabic: "Explain this text in detail" (reconstructed from the
        # mojibake-corrupted original -- TODO confirm exact wording).
        question = "اشرح هذا النص بالتفصيل"
        segment_info = {
            'id': 'test_segment',
            'start_ms': 0,
            'end_ms': 0
        }
        print(f"Selected text: {selected_text}")
        print(f"Question: {question}")
        print("Preferred model: OpenRouter AI")

        # Call process_question with OpenRouter AI specifically.
        result = question_engine.process_question(
            selected_text=selected_text,
            question=question,
            segment_info=segment_info,
            ui_language='ar',
            session_id=None,
            preferred_model='OpenRouter AI'
        )

        # Older engine versions return 3 values; newer ones add model_used.
        if len(result) == 4:
            answer, error, session_id, model_used = result
        else:
            answer, error, session_id = result
            model_used = "Unknown"

        print("\nResults:")
        print(f"  Answer: {'Success' if answer else 'Failed'}")
        print(f"  Error: {error}")
        print(f"  Model used: {model_used}")
        print(f"  Session ID: {session_id}")
        if answer:
            print(f"  Answer preview: {answer[:100]}...")

        if error and "meta-llama-3-70b-instruct" in str(error):
            print("\nFOUND THE PROBLEM!")
            print("The old model is being used somewhere in the process")
            # Trace where the stale model name is coming from by calling
            # the translator's low-level completion method directly.
            print("\nTracing the error source...")
            print("Testing _openrouter_complete directly...")
            test_prompt = "Test prompt"
            response, error = translator._openrouter_complete(test_prompt)
            if error and "meta-llama-3-70b-instruct" in str(error):
                print("Error is coming from _openrouter_complete method!")
                print(f"Error: {error}")
            else:
                print("_openrouter_complete works fine")
                print(f"Response: {response[:50] if response else 'None'}...")
        return True
    except Exception as e:
        print(f"Exception: {str(e)}")
        print(f"Traceback: {traceback.format_exc()}")
        if "meta-llama-3-70b-instruct" in str(e):
            print("\nFOUND THE PROBLEM IN EXCEPTION!")
            print("The old model reference is in the exception")
        return False
def test_openrouter_direct_call():
    """Call ``translator._openrouter_complete`` directly with a trivial prompt.

    Isolates whether the stale ``meta-llama-3-70b-instruct`` model name
    originates in the translator itself (hardcoded candidate list) rather
    than in the question engine.

    Returns:
        bool: True when a response came back, False on error or exception.
    """
    print("\nTesting OpenRouter Direct Call")
    print("=" * 60)
    try:
        from translator import get_translator
        translator = get_translator()
        print(f"Translator model: {translator.openrouter_model}")
        print(f"Has API key: {'Yes' if translator.openrouter_api_key else 'No'}")

        # Test with a simple prompt.
        test_prompt = "Hello, respond with 'Test successful' in Arabic."
        print(f"Test prompt: {test_prompt}")
        print("Calling _openrouter_complete...")
        response, error = translator._openrouter_complete(test_prompt)

        print("Results:")
        print(f"  Response: {'Success' if response else 'Failed'}")
        print(f"  Error: {error}")
        if response:
            print(f"  Response text: {response}")
        if error:
            print(f"  Error details: {error}")
            if "meta-llama-3-70b-instruct" in str(error):
                print("\nOLD MODEL FOUND IN ERROR!")
                print("This means the old model is hardcoded somewhere")
                # Inspect the method's source to check for a hardcoded
                # fallback/candidate model name.
                print("\nChecking candidates list in translator...")
                import inspect
                source = inspect.getsource(translator._openrouter_complete)
                if "meta-llama-3-70b-instruct" in source:
                    print("OLD MODEL FOUND IN SOURCE CODE!")
                else:
                    print("No old model in source code")
        return response is not None
    except Exception as e:
        print(f"Exception: {str(e)}")
        print(f"Traceback: {traceback.format_exc()}")
        return False
def main():
    """Run both debug checks and print a combined summary with next steps."""
    print("Ask AI Feature Debug Tool")
    print("=" * 60)
    # Test 1: Debug the Ask AI feature end to end.
    success1 = debug_ask_ai_feature()
    # Test 2: Call OpenRouter through the translator directly.
    success2 = test_openrouter_direct_call()
    print("\n" + "=" * 60)
    print("Debug Results:")
    print(f"  Ask AI Feature: {'PASS' if success1 else 'FAIL'}")
    print(f"  OpenRouter Direct: {'PASS' if success2 else 'FAIL'}")
    if not success1 or not success2:
        print("\nNext steps:")
        print("  1. Check for hardcoded model references")
        print("  2. Clear all caches and restart")
        print("  3. Check for environment variable conflicts")
# Script entry point (stray scrape artifact removed, indentation restored).
if __name__ == "__main__":
    main()