File size: 2,443 Bytes
0b45200
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
import sys
from pathlib import Path
import os
import ssl  # NOTE(review): imported but unused below — candidate for removal

# TEMPORARY: Disable SSL verification for testing.
# Blanking these env vars makes curl- and requests-based HTTP stacks skip
# CA-bundle verification. SECURITY: leaves connections open to MITM; must be
# reverted before production use (see matching note printed at runtime).
os.environ['CURL_CA_BUNDLE'] = ''
os.environ['REQUESTS_CA_BUNDLE'] = ''

# Add project root to path so `config.settings` below resolves when this
# script is run directly (script lives one level below the project root).
project_root = Path(__file__).parent.parent
sys.path.insert(0, str(project_root))

from langchain_openai import ChatOpenAI
from langchain.schema import HumanMessage
from config.settings import settings
import httpx

def test_openai_connection():
    """Smoke-test the OpenAI API connection via LangChain.

    Validates environment settings, sends one chat message through
    ``ChatOpenAI`` using an SSL-verification-disabled httpx client
    (temporary workaround), and prints diagnostics.

    Returns:
        bool: True if the round-trip API call succeeded, False on any
        configuration or connection error.
    """

    print("=" * 60)
    print("Testing OpenAI Connection for Capital Project Delivery System")
    print("=" * 60)

    http_client = None  # created lazily so `finally` can close it safely
    try:
        # Validate settings
        settings.validate()
        print("✓ Environment variables loaded successfully")
        print(f"  Model: {settings.MODEL_NAME}")
        print(f"  Temperature: {settings.TEMPERATURE}")
        print(f"  Max Tokens: {settings.MAX_TOKENS}")
        print()

        # Create HTTP client that doesn't verify SSL (TEMPORARY FIX)
        http_client = httpx.Client(verify=False)

        # Initialize LangChain OpenAI with custom client
        llm = ChatOpenAI(
            model=settings.MODEL_NAME,
            temperature=settings.TEMPERATURE,
            max_tokens=settings.MAX_TOKENS,
            openai_api_key=settings.OPENAI_API_KEY,
            http_client=http_client
        )

        print("Testing API call...")

        # Simple test message
        test_message = HumanMessage(
            content="You are an orchestrating agent for a capital project delivery system. "
                   "Respond with 'Connection successful' if you can read this message."
        )

        response = llm.invoke([test_message])

        print("✓ OpenAI API connection successful!")
        print(f"\nResponse: {response.content}")
        print("\n" + "=" * 60)
        print("Setup is complete and working correctly!")
        print("=" * 60)
        print("\nNOTE: SSL verification is disabled. Fix certificates for production!")

        return True

    except ValueError as e:
        # settings.validate() signals missing/invalid configuration via ValueError
        print(f"✗ Configuration Error: {e}")
        return False
    except Exception as e:
        # Broad catch is intentional at this top-level diagnostic boundary:
        # any network/auth/library failure should be reported, not raised.
        print(f"✗ Connection Error: {e}")
        print(f"\nFull error details: {type(e).__name__}")
        import traceback
        traceback.print_exc()
        return False
    finally:
        # Fix: the httpx client was previously never closed, leaking its
        # connection pool. Close it regardless of success or failure.
        if http_client is not None:
            http_client.close()

if __name__ == "__main__":
    # Propagate the result as a process exit code so shells and CI can
    # detect a failed setup check (0 = success, 1 = failure). Previously
    # the boolean return value was silently discarded.
    sys.exit(0 if test_openai_connection() else 1)