pratikshahp committed on
Commit
df42853
·
verified ·
1 Parent(s): e51771f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +20 -3
app.py CHANGED
@@ -1,6 +1,7 @@
1
  from llama_index.core import VectorStoreIndex,SimpleDirectoryReader,ServiceContext
2
  from llama_index_llms_huggingface import HuggingFaceLLM
3
- from llama_index.prompts.prompts import SimpleInputPrompt
 
4
  import os
5
  from dotenv import load_dotenv
6
  from huggingface_hub import login
@@ -12,15 +13,31 @@ from llama_index.embeddings import LangchainEmbedding
12
 
13
  # Get the environment variables
14
  HF_TOKEN = os.getenv('HUGGING_FACE_TOKEN')
 
15
 
16
  documents=SimpleDirectoryReader("/state-of-the-union.txt").load_data()
 
17
  system_prompt="""
18
  You are a Q&A assistant. Your goal is to answer questions as
19
  accurately as possible based on the instructions and context provided.
20
  """
21
  ## Default format supportable by LLama2
22
- query_wrapper_prompt=SimpleInputPrompt("<|USER|>{query_str}<|ASSISTANT|>")
23
- login(token=HF_TOKEN)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
 
25
  llm = HuggingFaceLLM(
26
  context_window=4096,
 
1
  from llama_index.core import VectorStoreIndex,SimpleDirectoryReader,ServiceContext
2
  from llama_index_llms_huggingface import HuggingFaceLLM
3
+ from llama_index.core import PromptTemplate
4
+
5
  import os
6
  from dotenv import load_dotenv
7
  from huggingface_hub import login
 
13
 
14
  # Get the environment variables
15
  HF_TOKEN = os.getenv('HUGGING_FACE_TOKEN')
16
+ login(token=HF_TOKEN)
17
 
18
  documents=SimpleDirectoryReader("/state-of-the-union.txt").load_data()
19
+
20
  system_prompt="""
21
  You are a Q&A assistant. Your goal is to answer questions as
22
  accurately as possible based on the instructions and context provided.
23
  """
24
  ## Default format supportable by LLama2
25
+ template = (
26
+ "We have provided context information below. \n"
27
+ "---------------------\n"
28
+ "{context_str}"
29
+ "\n---------------------\n"
30
+ "Given this information, please answer the question: {query_str}\n"
31
+ )
32
+ #query_wrapper_prompt=SimpleInputPrompt("<|USER|>{query_str}<|ASSISTANT|>")
33
+
34
+ query_wrapper_prompt = PromptTemplate(template)
35
+
36
+ # you can create text prompt (for completion API)
37
+ #prompt = qa_template.format(context_str=..., query_str=...)
38
+
39
+ # or easily convert to message prompts (for chat API)
40
+ #messages = qa_template.format_messages(context_str=..., query_str=...)
41
 
42
  llm = HuggingFaceLLM(
43
  context_window=4096,