# Source: syncmaster8 / debug_specific_issue.py
# (author: aseelflihan — "Initial commit without node_modules", commit 33d3592)
# debug_specific_issue.py - Debug the specific issue with OpenRouter model
import os
import sys
from dotenv import load_dotenv
def debug_openrouter_issue():
    """Debug the specific OpenRouter model issue.

    Prints the configured OpenRouter model and a masked preview of the API
    key, then calls the translator's raw completion endpoint with a tiny
    prompt. If the call fails and the error mentions the retired
    meta-llama model, the live source of ``_openrouter_complete`` is
    inspected for a hardcoded model string.

    Returns:
        bool: True if the OpenRouter call succeeded, False otherwise.
    """
    print("πŸ” Debugging OpenRouter Model Issue")
    print("=" * 60)
    # Load environment
    load_dotenv()
    # Check environment. Mask the key and tolerate a missing one — the
    # previous version sliced None[:20] and crashed (TypeError) when
    # OPENROUTER_API_KEY was unset, before the try/except below.
    print(f"πŸ“‹ OPENROUTER_MODEL: {os.getenv('OPENROUTER_MODEL')}")
    api_key = os.getenv('OPENROUTER_API_KEY')
    key_preview = f"{api_key[:20]}..." if api_key else "NOT SET"
    print(f"πŸ”‘ OPENROUTER_API_KEY: {key_preview}")
    # Import and test translator
    try:
        from translator import get_translator
        translator = get_translator()
        print(f"πŸ“‹ Translator model: {translator.openrouter_model}")
        # Test direct OpenRouter call
        test_prompt = "Hello, respond with 'Test successful' in Arabic."
        print(f"\nπŸ§ͺ Testing OpenRouter with prompt: {test_prompt}")
        response, error = translator._openrouter_complete(test_prompt)
        if response:
            print(f"βœ… Success: {response}")
            return True
        print(f"❌ Error: {error}")
        # Check whether the failure still references the retired model name.
        if "meta-llama-3-70b-instruct" in str(error):
            print("🚨 ERROR: Old model found in error message!")
            print("πŸ” This means the old model is being used somewhere...")
            # Inspect the running implementation for a hardcoded model.
            import inspect
            source = inspect.getsource(translator._openrouter_complete)
            for i, line in enumerate(source.split('\n')):
                if 'meta-llama' in line and '70b' in line:
                    print(f"🚨 Found old model in line {i}: {line.strip()}")
        else:
            print("ℹ️ Error doesn't mention old model")
        return False
    except Exception as e:
        print(f"πŸ’₯ Exception: {str(e)}")
        return False
def test_ai_questions_engine():
    """Exercise the AI Questions Engine end-to-end with OpenRouter.

    Sends a fixed text/question pair through ``process_question`` and
    reports the outcome, with extra diagnostics whenever the retired
    meta-llama model name shows up in an error or exception.

    Returns:
        bool: True when the engine produced an answer, False otherwise.
    """
    print("\nπŸ€– Testing AI Questions Engine")
    print("=" * 60)
    sample_text = "Hello, this is a test text."
    sample_question = "What does this text mean?"
    try:
        from translator import get_translator
        from ai_questions import AIQuestionEngine

        engine = AIQuestionEngine(get_translator())
        print(f"πŸ“ Test text: {sample_text}")
        print(f"❓ Test question: {sample_question}")
        print(f"🎯 Preferred model: OpenRouter AI")
        answer, error, session_id, model_used = engine.process_question(
            selected_text=sample_text,
            question=sample_question,
            segment_info={"id": "test_segment"},
            ui_language='en',
            preferred_model='OpenRouter AI',
        )
        if not answer:
            print(f"❌ Error: {error}")
            # Does the failure mention the retired model?
            if "meta-llama-3-70b-instruct" in str(error):
                print("🚨 ERROR: Old model found in AI Questions error!")
            else:
                print("ℹ️ Error doesn't mention old model")
            return False
        print(f"βœ… Success!")
        print(f"πŸ“ Answer: {answer[:100]}...")
        print(f"πŸ”§ Model used: {model_used}")
        return True
    except Exception as exc:
        print(f"πŸ’₯ Exception: {str(exc)}")
        # Does the exception mention the retired model?
        if "meta-llama-3-70b-instruct" in str(exc):
            print("🚨 ERROR: Old model found in exception!")
        else:
            print("ℹ️ Exception doesn't mention old model")
        return False
def check_for_cached_instances():
    """Look for cached translator/AI objects in Streamlit session state.

    Any session-state key whose name hints at a translator, AI engine,
    question handler, or model is deleted so a stale instance can't keep
    an old model configuration alive. Silently reports when Streamlit is
    unavailable (e.g. when run outside a Streamlit app).
    """
    print("\nπŸ—‚οΈ Checking for Cached Instances")
    print("=" * 60)
    # Check if there are any cached translator instances
    try:
        import streamlit as st

        if not hasattr(st, 'session_state'):
            print("ℹ️ Streamlit session state not available")
            return
        print("πŸ“‹ Streamlit session state available")
        # Collect suspicious keys first, then delete — never mutate
        # session_state while iterating it.
        suspicious = ('translator', 'ai', 'question', 'model')
        cached_keys = [
            key for key in st.session_state.keys()
            if any(term in key.lower() for term in suspicious)
        ]
        if cached_keys:
            print(f"πŸ—‘οΈ Found cached keys: {cached_keys}")
            for key in cached_keys:
                del st.session_state[key]
            print("βœ… Cleared cached instances")
        else:
            print("βœ… No cached instances found")
    except Exception as e:
        print(f"ℹ️ Streamlit not available: {str(e)}")
def main():
    """Run the full debug sequence and print a pass/fail summary."""
    print("πŸš€ Specific OpenRouter Issue Debug")
    print("=" * 60)
    # Clear any stale cached objects before probing.
    check_for_cached_instances()
    # Probe the translator directly, then the question engine on top of it.
    translator_ok = debug_openrouter_issue()
    ai_questions_ok = test_ai_questions_engine()
    print("\n" + "=" * 60)
    print("πŸ“Š Debug Results:")
    print(f"   Translator: {'βœ… OK' if translator_ok else '❌ FAILED'}")
    print(f"   AI Questions: {'βœ… OK' if ai_questions_ok else '❌ FAILED'}")
    if translator_ok and ai_questions_ok:
        print("\nπŸŽ‰ All tests passed! The issue might be resolved.")
        return
    print("\n⚠️ Issue still exists. Need further investigation.")
    if not translator_ok:
        print("πŸ’‘ The problem is in the translator module")
    if not ai_questions_ok:
        print("πŸ’‘ The problem is in the AI questions engine")


if __name__ == "__main__":
    main()