Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -18,31 +18,31 @@ DB_FAISS_PATH = 'vectorstore/db_faiss'
|
|
| 18 |
# Load the model of choice
|
| 19 |
def load_llm():
|
| 20 |
# url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q4_K_M.bin" # 2.87G
|
| 21 |
-
url ="https://huggingface.co/skeskinen/llama-lite-134m/blob/main/pytorch_model.bin"
|
| 22 |
-
model_loc, file_size = dl_hf_model(url)
|
| 23 |
|
| 24 |
-
llm = CTransformers(
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
|
| 38 |
|
| 39 |
-
)
|
| 40 |
# llm = G4FLLM(
|
| 41 |
# model=models.gpt_35_turbo,
|
| 42 |
# provider=Provider.Aichatos,
|
| 43 |
# )
|
| 44 |
-
|
| 45 |
-
|
| 46 |
return llm
|
| 47 |
hide_streamlit_style = """
|
| 48 |
<style>
|
|
|
|
| 18 |
# Load the model of choice
|
| 19 |
def load_llm():
|
| 20 |
# url = "https://huggingface.co/TheBloke/Llama-2-7B-Chat-GGML/blob/main/llama-2-7b-chat.ggmlv3.q4_K_M.bin" # 2.87G
|
| 21 |
+
# url ="https://huggingface.co/skeskinen/llama-lite-134m/blob/main/pytorch_model.bin"
|
| 22 |
+
# model_loc, file_size = dl_hf_model(url)
|
| 23 |
|
| 24 |
+
# llm = CTransformers(
|
| 25 |
+
# model=model_loc,
|
| 26 |
+
# temperature=0.2,
|
| 27 |
+
# model_type="llama",
|
| 28 |
+
# top_k=10,
|
| 29 |
+
# top_p=0.9,
|
| 30 |
+
# repetition_penalty=1.0,
|
| 31 |
+
# max_new_tokens=512, # adjust as needed
|
| 32 |
+
# seed=42,
|
| 33 |
+
# reset=True, # reset history (cache)
|
| 34 |
+
# stream=False,
|
| 35 |
+
# # threads=cpu_count,
|
| 36 |
+
# # stop=prompt_prefix[1:2],
|
| 37 |
|
| 38 |
|
| 39 |
+
# )
|
| 40 |
# llm = G4FLLM(
|
| 41 |
# model=models.gpt_35_turbo,
|
| 42 |
# provider=Provider.Aichatos,
|
| 43 |
# )
|
| 44 |
+
OPENAI_API_KEY = 'REDACTED-LEAKED-KEY'  # SECURITY: a real OpenAI API key was committed here in plain text. It must be treated as compromised — revoke/rotate it immediately and load the key from an environment variable (e.g. os.environ["OPENAI_API_KEY"]) instead of hardcoding it.
|
| 45 |
+
llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, temperature=0)
|
| 46 |
return llm
|
| 47 |
hide_streamlit_style = """
|
| 48 |
<style>
|