DeepSoft-Tech committed on
Commit
113fa3a
·
verified ·
1 Parent(s): ee01867

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -5
app.py CHANGED
@@ -8,18 +8,18 @@ from langchain.chains.question_answering import load_qa_chain
8
  from langchain.callbacks import get_openai_callback
9
  from langchain import HuggingFaceHub, LLMChain
10
  from langchain.embeddings import HuggingFaceHubEmbeddings,HuggingFaceInferenceAPIEmbeddings
11
- token = os.environ['HF_TOKEN']
12
  repo_id = "sentence-transformers/all-mpnet-base-v2"
13
  hf = HuggingFaceHubEmbeddings(
14
  repo_id=repo_id,
15
  task="feature-extraction",
16
- huggingfacehub_api_token= token,
17
  )
18
 
19
  from langchain.embeddings import HuggingFaceInferenceAPIEmbeddings
20
 
21
  embeddings = HuggingFaceInferenceAPIEmbeddings(
22
- api_key=token, model_name="sentence-transformers/all-MiniLM-l6-v2"
23
  )
24
 
25
 
@@ -63,8 +63,8 @@ def main():
63
 
64
  hub_llm = HuggingFaceHub(
65
  repo_id='HuggingFaceH4/zephyr-7b-beta',
66
- model_kwargs={'temperature':0.01,"max_length": 2048,},
67
- huggingfacehub_api_token=token)
68
  llm = hub_llm
69
  chain = load_qa_chain(llm, chain_type="stuff")
70
  with get_openai_callback() as cb:
 
8
  from langchain.callbacks import get_openai_callback
9
  from langchain import HuggingFaceHub, LLMChain
10
  from langchain.embeddings import HuggingFaceHubEmbeddings,HuggingFaceInferenceAPIEmbeddings
11
+ # token = os.environ['HF_TOKEN']
12
  repo_id = "sentence-transformers/all-mpnet-base-v2"
13
  hf = HuggingFaceHubEmbeddings(
14
  repo_id=repo_id,
15
  task="feature-extraction",
16
+ # huggingfacehub_api_token= token,
17
  )
18
 
19
  from langchain.embeddings import HuggingFaceInferenceAPIEmbeddings
20
 
21
  embeddings = HuggingFaceInferenceAPIEmbeddings(
22
+ model_name="sentence-transformers/all-MiniLM-l6-v2"
23
  )
24
 
25
 
 
63
 
64
  hub_llm = HuggingFaceHub(
65
  repo_id='HuggingFaceH4/zephyr-7b-beta',
66
+ model_kwargs={'temperature':0.01,"max_length": 2048,})
67
+ # huggingfacehub_api_token=token)
68
  llm = hub_llm
69
  chain = load_qa_chain(llm, chain_type="stuff")
70
  with get_openai_callback() as cb: