Spaces:
Sleeping
Sleeping
Claude Claude committed on
Fix Anthropic client initialization error
Browse files
- Update anthropic SDK to >=0.34.0 for better compatibility
- Add httpx dependency for explicit HTTP client control
- Add fallback initialization for proxies parameter issues
- Handle TypeError gracefully when initializing Anthropic client
Resolves: "Client.__init__() got an unexpected keyword argument 'proxies'"
🤖 Generated with [Claude Code](https://claude.com/claude-code)
Co-Authored-By: Claude <noreply@anthropic.com>
- requirements_hf.txt +2 -1
- src/llm/anthropic_client.py +14 -1
requirements_hf.txt
CHANGED
|
@@ -2,7 +2,8 @@
|
|
| 2 |
# For GPU-accelerated deployment with Be.FM model
|
| 3 |
|
| 4 |
streamlit>=1.28.0
|
| 5 |
-
anthropic>=0.
|
|
|
|
| 6 |
pydantic>=2.0.0
|
| 7 |
plotly>=5.17.0
|
| 8 |
networkx>=3.1
|
|
|
|
| 2 |
# For GPU-accelerated deployment with Be.FM model
|
| 3 |
|
| 4 |
streamlit>=1.28.0
|
| 5 |
+
anthropic>=0.34.0
|
| 6 |
+
httpx>=0.24.0
|
| 7 |
pydantic>=2.0.0
|
| 8 |
plotly>=5.17.0
|
| 9 |
networkx>=3.1
|
src/llm/anthropic_client.py
CHANGED
|
@@ -40,7 +40,20 @@ class AnthropicClient:
|
|
| 40 |
self.max_tokens = int(os.getenv("LLM_MAX_TOKENS", max_tokens))
|
| 41 |
self.temperature = float(os.getenv("LLM_TEMPERATURE", temperature))
|
| 42 |
|
| 43 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
|
| 45 |
def generate_response(
|
| 46 |
self,
|
|
|
|
| 40 |
self.max_tokens = int(os.getenv("LLM_MAX_TOKENS", max_tokens))
|
| 41 |
self.temperature = float(os.getenv("LLM_TEMPERATURE", temperature))
|
| 42 |
|
| 43 |
+
# Initialize Anthropic client with explicit parameters only
|
| 44 |
+
try:
|
| 45 |
+
self.client = Anthropic(api_key=self.api_key)
|
| 46 |
+
except TypeError as e:
|
| 47 |
+
# Fallback for version compatibility issues
|
| 48 |
+
if "proxies" in str(e):
|
| 49 |
+
# Try without any httpx configuration
|
| 50 |
+
import httpx
|
| 51 |
+
self.client = Anthropic(
|
| 52 |
+
api_key=self.api_key,
|
| 53 |
+
http_client=httpx.Client()
|
| 54 |
+
)
|
| 55 |
+
else:
|
| 56 |
+
raise
|
| 57 |
|
| 58 |
def generate_response(
|
| 59 |
self,
|