|
|
import sys |
|
|
from pathlib import Path |
|
|
import os |
|
|
import ssl |
|
|
|
|
|
|
|
|
# SECURITY NOTE: emptying these variables disables TLS certificate
# verification for curl/requests-based HTTP clients process-wide.
# Acceptable only for this local smoke test — restore real CA bundles
# before any production use.
os.environ['CURL_CA_BUNDLE'] = ''


os.environ['REQUESTS_CA_BUNDLE'] = ''


# Make the project root importable so the `config.settings` import below
# resolves when this script is run directly (not as an installed package).
# assumes this file lives one directory below the project root — TODO confirm
project_root = Path(__file__).parent.parent


sys.path.insert(0, str(project_root))
|
|
|
|
|
from langchain_openai import ChatOpenAI |
|
|
from langchain.schema import HumanMessage |
|
|
from config.settings import settings |
|
|
import httpx |
|
|
|
|
|
def test_openai_connection():
    """Smoke-test the OpenAI API connection for the capital project system.

    Validates required environment configuration via ``settings.validate()``,
    builds a ``ChatOpenAI`` client over an ``httpx.Client`` with TLS
    verification disabled (matching the module-level CA-bundle overrides),
    and sends a single round-trip test message.

    Returns:
        bool: ``True`` when the API round trip succeeds, ``False`` on a
        configuration or connection error. Errors are printed, never raised.
    """
    print("=" * 60)
    print("Testing OpenAI Connection for Capital Project Delivery System")
    print("=" * 60)

    # Created lazily inside the try so a config failure skips it; closed in
    # the finally below (the previous version leaked the connection pool).
    http_client = None
    try:
        # Raises ValueError when required settings/env vars are missing.
        settings.validate()
        # NOTE(review): the "β" prefixes below look like mojibake of
        # "✓"/"✗" status marks — confirm the intended glyphs.
        print("β Environment variables loaded successfully")
        print(f"   Model: {settings.MODEL_NAME}")
        print(f"   Temperature: {settings.TEMPERATURE}")
        print(f"   Max Tokens: {settings.MAX_TOKENS}")
        print()

        # verify=False disables TLS verification — smoke-test only; fix
        # certificates for production (see note printed on success).
        http_client = httpx.Client(verify=False)

        llm = ChatOpenAI(
            model=settings.MODEL_NAME,
            temperature=settings.TEMPERATURE,
            max_tokens=settings.MAX_TOKENS,
            openai_api_key=settings.OPENAI_API_KEY,
            http_client=http_client
        )

        print("Testing API call...")

        test_message = HumanMessage(
            content="You are an orchestrating agent for a capital project delivery system. "
            "Respond with 'Connection successful' if you can read this message."
        )

        response = llm.invoke([test_message])

        print("β OpenAI API connection successful!")
        print(f"\nResponse: {response.content}")
        print("\n" + "=" * 60)
        print("Setup is complete and working correctly!")
        print("=" * 60)
        print("\nNOTE: SSL verification is disabled. Fix certificates for production!")

        return True

    except ValueError as e:
        # settings.validate() failures land here.
        print(f"β Configuration Error: {e}")
        return False
    except Exception as e:
        # Broad catch is deliberate: this is a top-level diagnostic script,
        # and any network/auth/library failure should be reported, not raised.
        print(f"β Connection Error: {e}")
        print(f"\nFull error details: {type(e).__name__}")
        import traceback
        traceback.print_exc()
        return False
    finally:
        # Release the HTTP connection pool regardless of outcome.
        if http_client is not None:
            http_client.close()
|
|
|
|
|
if __name__ == "__main__":
    # Propagate the test result as the process exit code (0 = success,
    # 1 = failure) so shells/CI can detect a failed smoke test; the
    # original discarded the boolean and always exited 0.
    sys.exit(0 if test_openai_connection() else 1)