Spaces:
Sleeping
Sleeping
Zwea Htet
committed on
Commit
·
9281dcc
1
Parent(s):
7704dca
update llamaCustom.py
Browse files
- models/llamaCustom.py +1 -1
- requirements.txt +1 -1
models/llamaCustom.py
CHANGED
|
@@ -92,7 +92,7 @@ class OurLLM(CustomLLM):
|
|
| 92 |
return LLMMetadata(
|
| 93 |
context_window=CONTEXT_WINDOW,
|
| 94 |
num_output=NUM_OUTPUT,
|
| 95 |
-
|
| 96 |
)
|
| 97 |
|
| 98 |
@llm_completion_callback()
|
|
|
|
| 92 |
return LLMMetadata(
|
| 93 |
context_window=CONTEXT_WINDOW,
|
| 94 |
num_output=NUM_OUTPUT,
|
| 95 |
+
model_name=self.model_name,
|
| 96 |
)
|
| 97 |
|
| 98 |
@llm_completion_callback()
|
requirements.txt
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
llama_index
|
| 2 |
torch
|
| 3 |
transformers
|
| 4 |
panda
|
|
|
|
| 1 |
+
llama_index==0.8.64.post1
|
| 2 |
torch
|
| 3 |
transformers
|
| 4 |
panda
|