Fix LLM bug
Browse files- app/llm.py +15 -14
app/llm.py
CHANGED
|
@@ -101,6 +101,7 @@ class LLMClient:
|
|
| 101 |
Returns:
|
| 102 |
str: Text được tạo ra
|
| 103 |
"""
|
|
|
|
| 104 |
try:
|
| 105 |
if self.provider == "openai":
|
| 106 |
return await self._generate_openai(prompt, system_prompt, **kwargs)
|
|
@@ -114,9 +115,8 @@ class LLMClient:
|
|
| 114 |
return await self._generate_hfs(prompt, **kwargs)
|
| 115 |
else:
|
| 116 |
raise ValueError(f"Unsupported provider: {self.provider}")
|
| 117 |
-
|
| 118 |
except Exception as e:
|
| 119 |
-
logger.error(f"Error generating text with {self.provider}: {e}")
|
| 120 |
raise
|
| 121 |
|
| 122 |
async def _generate_openai(self, prompt: str, system_prompt: Optional[str] = None, **kwargs) -> str:
|
|
@@ -220,23 +220,23 @@ class LLMClient:
|
|
| 220 |
|
| 221 |
async def _generate_hfs(self, prompt: str, **kwargs) -> str:
|
| 222 |
"""Generate text với HFS provider."""
|
| 223 |
-
|
| 224 |
-
|
| 225 |
-
}
|
| 226 |
-
|
| 227 |
headers = {}
|
| 228 |
if self.api_key:
|
| 229 |
headers["Authorization"] = f"Bearer {self.api_key}"
|
| 230 |
-
|
| 231 |
-
response
|
| 232 |
-
f"{self.base_url}/purechat",
|
| 233 |
-
headers=headers,
|
| 234 |
-
json=payload
|
| 235 |
-
)
|
| 236 |
response.raise_for_status()
|
| 237 |
-
|
| 238 |
data = response.json()
|
| 239 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 240 |
|
| 241 |
@timing_decorator_async
|
| 242 |
async def chat(
|
|
@@ -254,6 +254,7 @@ class LLMClient:
|
|
| 254 |
Returns:
|
| 255 |
str: Response từ LLM
|
| 256 |
"""
|
|
|
|
| 257 |
if self.provider == "openai":
|
| 258 |
return await self._chat_openai(messages, **kwargs)
|
| 259 |
else:
|
|
|
|
| 101 |
Returns:
|
| 102 |
str: Text được tạo ra
|
| 103 |
"""
|
| 104 |
+
logger.info(f"[LLM] generate_text - provider: {self.provider}, prompt: {prompt}")
|
| 105 |
try:
|
| 106 |
if self.provider == "openai":
|
| 107 |
return await self._generate_openai(prompt, system_prompt, **kwargs)
|
|
|
|
| 115 |
return await self._generate_hfs(prompt, **kwargs)
|
| 116 |
else:
|
| 117 |
raise ValueError(f"Unsupported provider: {self.provider}")
|
|
|
|
| 118 |
except Exception as e:
|
| 119 |
+
logger.error(f"[LLM] Error generating text with {self.provider}: {e}")
|
| 120 |
raise
|
| 121 |
|
| 122 |
async def _generate_openai(self, prompt: str, system_prompt: Optional[str] = None, **kwargs) -> str:
|
|
|
|
| 220 |
|
| 221 |
async def _generate_hfs(self, prompt: str, **kwargs) -> str:
    """Generate text with the HFS provider.

    Sends the prompt to the provider's ``/purechat`` endpoint and extracts
    the generated text from the JSON response.

    Args:
        prompt: The prompt text to send to the HFS service.
        **kwargs: Accepted for signature parity with the other provider
            methods; currently unused by this provider.

    Returns:
        str: The generated text. Falls back to ``str(data)`` when the
        response payload does not match a known shape.

    Raises:
        HTTPStatusError-style exception from ``response.raise_for_status()``
        on a non-2xx response.
    """
    endpoint = f"{self.base_url}/purechat"
    payload = {"prompt": prompt}
    logger.info(f"[LLM] _generate_hfs - endpoint: {endpoint}, payload: {payload}")
    headers = {}
    if self.api_key:
        headers["Authorization"] = f"Bearer {self.api_key}"
    response = await self._client.post(endpoint, headers=headers, json=payload)
    logger.info(f"[LLM] _generate_hfs - response status: {response.status_code}")
    response.raise_for_status()
    data = response.json()
    logger.info(f"[LLM] _generate_hfs - response data: {data}")
    # Response schema is not pinned by the API from here; assumed to be
    # either {'result': '...'} or {'data': ['...']} -- TODO confirm against
    # the HFS service documentation.
    if 'result' in data:
        return data['result']
    # Guard against an empty 'data' list (the original indexed [0]
    # unconditionally and would raise IndexError); coerce the element to
    # str to honor the declared return type.
    if 'data' in data and isinstance(data['data'], list) and data['data']:
        return str(data['data'][0])
    return str(data)
|
| 240 |
|
| 241 |
@timing_decorator_async
|
| 242 |
async def chat(
|
|
|
|
| 254 |
Returns:
|
| 255 |
str: Response từ LLM
|
| 256 |
"""
|
| 257 |
+
logger.info(f"[LLM] chat - messages: {messages}")
|
| 258 |
if self.provider == "openai":
|
| 259 |
return await self._chat_openai(messages, **kwargs)
|
| 260 |
else:
|