removed max output tokens
app.py CHANGED

@@ -96,7 +96,7 @@ def main():
         Settings.llm = OpenAI(
             model=llm_name,
             temperature=temperature,
-            max_tokens=MAX_OUTPUT_TOKENS
+            # max_tokens=MAX_OUTPUT_TOKENS
         )
         Settings.tokenizer = tiktoken.encoding_for_model(llm_name).encode
         Settings.num_output = MAX_OUTPUT_TOKENS
@@ -109,7 +109,7 @@ def main():
             model_name=llm_name,
             token=os.environ.get("HFTOKEN"),
             temperature=temperature,
-            max_tokens=MAX_OUTPUT_TOKENS
+            # max_tokens=MAX_OUTPUT_TOKENS
         )
         Settings.tokenizer = AutoTokenizer.from_pretrained(
             llm_name,
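For context, a minimal sketch of the OpenAI branch as it stands after this change, assuming the llama-index-core, llama-index-llms-openai, and tiktoken packages; the concrete llm_name, temperature, and MAX_OUTPUT_TOKENS values below are placeholders, not taken from the app. With max_tokens commented out, response length falls back to the provider's default cap, while Settings.num_output still reserves MAX_OUTPUT_TOKENS of the context window when LlamaIndex packs prompts. The Hugging Face branch in the second hunk gets the same treatment.

import tiktoken
from llama_index.core import Settings
from llama_index.llms.openai import OpenAI

llm_name = "gpt-3.5-turbo"  # placeholder model name
temperature = 0.1           # placeholder sampling temperature
MAX_OUTPUT_TOKENS = 512     # placeholder output budget

Settings.llm = OpenAI(
    model=llm_name,
    temperature=temperature,
    # max_tokens=MAX_OUTPUT_TOKENS  # omitted: defer to the provider's default
)
# Token counting still uses the model's own tokenizer...
Settings.tokenizer = tiktoken.encoding_for_model(llm_name).encode
# ...and num_output still budgets room for the response during prompt packing.
Settings.num_output = MAX_OUTPUT_TOKENS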