# Spaces: Sleeping  (Hugging Face Spaces page-header residue from the web scrape;
# not part of the script — kept as a comment so the file stays valid Python)
# debug_specific_issue.py - Debug the specific issue with OpenRouter model
import os
import sys

from dotenv import load_dotenv
def debug_openrouter_issue():
    """Debug the specific OpenRouter model issue.

    Loads the environment, reports the configured OpenRouter model/key, then
    exercises the translator's raw OpenRouter completion path directly.

    Returns:
        bool: True when the direct OpenRouter call succeeds, False otherwise.
    """
    print("π Debugging OpenRouter Model Issue")
    print("=" * 60)

    # Load environment variables from .env before reading them
    load_dotenv()

    # Check environment. Read the API key once and guard against it being
    # unset — the original sliced os.getenv(...)[:20] directly, which raised
    # TypeError on None before the try below could catch anything.
    print(f"π OPENROUTER_MODEL: {os.getenv('OPENROUTER_MODEL')}")
    api_key = os.getenv('OPENROUTER_API_KEY')
    if api_key:
        # Only show a masked prefix of the key
        print(f"π OPENROUTER_API_KEY: {api_key[:20]}...")
    else:
        print("π OPENROUTER_API_KEY: NOT SET")

    # Import and test translator
    try:
        from translator import get_translator
        translator = get_translator()
        print(f"π Translator model: {translator.openrouter_model}")

        # Test direct OpenRouter call
        test_prompt = "Hello, respond with 'Test successful' in Arabic."
        print(f"\nπ§ͺ Testing OpenRouter with prompt: {test_prompt}")
        response, error = translator._openrouter_complete(test_prompt)

        if response:
            print(f"β Success: {response}")
            return True

        print(f"β Error: {error}")
        # A stale model name inside the error means something still
        # references the old model.
        if "meta-llama-3-70b-instruct" in str(error):
            print("π¨ ERROR: Old model found in error message!")
            print("π This means the old model is being used somewhere...")
            # Inspect the implementation source for a hardcoded model string
            import inspect
            source = inspect.getsource(translator._openrouter_complete)
            for i, line in enumerate(source.split('\n')):
                if 'meta-llama' in line and '70b' in line:
                    print(f"π¨ Found old model in line {i}: {line.strip()}")
            return False

        print("βΉοΈ Error doesn't mention old model")
        return False
    except Exception as e:
        # Broad catch is intentional: this is a best-effort diagnostic script
        print(f"π₯ Exception: {str(e)}")
        return False
def test_ai_questions_engine():
    """Run one question through the AI Questions Engine via OpenRouter AI.

    Returns True when the engine produced an answer, False on any error or
    exception; errors/exceptions are scanned for a stale model name.
    """
    print("\nπ€ Testing AI Questions Engine")
    print("=" * 60)
    try:
        from translator import get_translator
        from ai_questions import AIQuestionEngine

        engine = AIQuestionEngine(get_translator())

        # Fixed sample input, explicitly preferring OpenRouter AI
        sample_text = "Hello, this is a test text."
        sample_question = "What does this text mean?"
        print(f"π Test text: {sample_text}")
        print(f"β Test question: {sample_question}")
        print(f"π― Preferred model: OpenRouter AI")

        answer, error, session_id, model_used = engine.process_question(
            selected_text=sample_text,
            question=sample_question,
            segment_info={"id": "test_segment"},
            ui_language='en',
            preferred_model='OpenRouter AI',
        )

        if not answer:
            print(f"β Error: {error}")
            # Does the failure still reference the old model?
            if "meta-llama-3-70b-instruct" in str(error):
                print("π¨ ERROR: Old model found in AI Questions error!")
            else:
                print("βΉοΈ Error doesn't mention old model")
            return False

        print(f"β Success!")
        print(f"π Answer: {answer[:100]}...")
        print(f"π§ Model used: {model_used}")
        return True
    except Exception as e:
        print(f"π₯ Exception: {str(e)}")
        # Same stale-model scan, but on the raised exception text
        if "meta-llama-3-70b-instruct" in str(e):
            print("π¨ ERROR: Old model found in exception!")
        else:
            print("βΉοΈ Exception doesn't mention old model")
        return False
def check_for_cached_instances():
    """Look for (and clear) Streamlit session-state entries that could cache
    a translator/AI instance built with a stale model.

    Best-effort: silently reports when Streamlit is unavailable.
    """
    print("\nποΈ Checking for Cached Instances")
    print("=" * 60)
    try:
        import streamlit as st

        if not hasattr(st, 'session_state'):
            print("βΉοΈ Streamlit session state not available")
            return

        print("π Streamlit session state available")
        # Collect every key that looks translator/AI/model related
        suspect_terms = ('translator', 'ai', 'question', 'model')
        cached_keys = [
            key for key in st.session_state.keys()
            if any(term in key.lower() for term in suspect_terms)
        ]

        if cached_keys:
            print(f"ποΈ Found cached keys: {cached_keys}")
            # Drop them so fresh instances pick up the current model
            for key in cached_keys:
                del st.session_state[key]
            print("β Cleared cached instances")
        else:
            print("β No cached instances found")
    except Exception as e:
        print(f"βΉοΈ Streamlit not available: {str(e)}")
def main():
    """Run the full debug sequence and print a pass/fail summary."""
    print("π Specific OpenRouter Issue Debug")
    print("=" * 60)

    # Step 1: drop any cached instances that could mask a fix
    check_for_cached_instances()

    # Step 2: exercise the translator directly
    translator_ok = debug_openrouter_issue()

    # Step 3: exercise the AI Questions engine
    ai_questions_ok = test_ai_questions_engine()

    print("\n" + "=" * 60)
    print("π Debug Results:")
    for label, ok in (("Translator", translator_ok), ("AI Questions", ai_questions_ok)):
        status = 'β OK' if ok else 'β FAILED'
        print(f" {label}: {status}")

    if translator_ok and ai_questions_ok:
        print("\nπ All tests passed! The issue might be resolved.")
        return

    print("\nβ οΈ Issue still exists. Need further investigation.")
    if not translator_ok:
        print("π‘ The problem is in the translator module")
    if not ai_questions_ok:
        print("π‘ The problem is in the AI questions engine")
# Script entry point: run all debug steps when executed directly.
if __name__ == "__main__":
    main()