Spaces: Running
Commit: eb874bf
Parent(s): 58c1ce7
Commit message: removed token limit
Files changed: core_logic.py (+1 -1)
core_logic.py
CHANGED
@@ -52,7 +52,7 @@ def chat_function(message, history):
 52 |         messages=messages,
 53 |         stream=True,
 54 |         temperature=0.0,
 55 | -       max_tokens=1024  # Limit response size to prevent mid-stream cuts
 56 |     )
 57 |
 58 |     response_text = ""
 52 |         messages=messages,
 53 |         stream=True,
 54 |         temperature=0.0,
 55 | +       #max_tokens=1024  # Limit response size to prevent mid-stream cuts
 56 |     )
 57 |
 58 |     response_text = ""