VietCat committed on
Commit
b39357e
·
1 Parent(s): dd1ea60

fix max token error

Browse files
Files changed (1) hide show
  1. app/llm.py +2 -2
app/llm.py CHANGED
@@ -165,8 +165,8 @@ class LLMClient:
165
  Tạo text từ prompt sử dụng LLM.
166
  """
167
  logger.info(
168
- f"[LLM] generate_text - provider: {self.provider}\n\t prompt: {_safe_truncate(prompt)}"
169
- # f"[LLM] generate_text - provider: {self.provider}\n\t prompt: {prompt}"
170
  )
171
  try:
172
  if self.provider == "openai":
 
165
  Tạo text từ prompt sử dụng LLM.
166
  """
167
  logger.info(
168
+ # f"[LLM] generate_text - provider: {self.provider}\n\t prompt: {_safe_truncate(prompt)}"
169
+ f"[LLM] generate_text - provider: {self.provider}\n\t prompt: {prompt}"
170
  )
171
  try:
172
  if self.provider == "openai":