Hugging Face Spaces (Space status: Sleeping)

Commit: Update src/tinny_llama.py
File changed: src/tinny_llama.py (+1 -2)

@@ -132,8 +132,7 @@ def initialize_llm() -> Optional[HuggingFacePipeline]:
         do_sample=True,
         temperature=CONFIG["temperature"],
         top_p=CONFIG["top_p"],
-        truncation=True
-        device=-1  # Explicitly set to CPU
+        truncation=True
     )
     logger.info("LLM initialized successfully.")
     return HuggingFacePipeline(pipeline=text_gen)

Note: the commit drops the `device=-1` keyword argument from the pipeline call. The removed version was also a syntax error — `truncation=True` had no trailing comma before `device=-1` — which this change fixes by keeping only `truncation=True`.