menikev commited on
Commit
f8bd3c2
·
verified ·
1 Parent(s): fb708df

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -18
app.py CHANGED
@@ -1,5 +1,4 @@
1
  import gradio as gr
2
- import os
3
  from langchain_community.document_loaders import PyPDFLoader
4
  from langchain_community.vectorstores import FAISS
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter
@@ -14,23 +13,10 @@ import warnings
14
  warnings.warn = warn
15
  warnings.filterwarnings('ignore')
16
 
17
- from dotenv import load_dotenv
18
- load_dotenv()
19
-
20
- from langchain.prompts import PromptTemplate
21
- from langchain_huggingface import (
22
- HuggingFaceEmbeddings,
23
- HuggingFaceEndpoint,
24
- )
25
- from langchain.schema.runnable import RunnablePassthrough
26
- from langchain.schema.output_parser import StrOutputParser
27
-
28
- # --- 1) CONFIG / SAFETY ---
29
-
30
- if not os.getenv("HUGGINGFACEHUB_API_TOKEN"):
31
- print("HUGGINGFACEHUB_API_TOKEN not found. Add it to your Space secrets.")
32
- raise SystemExit(1)
33
-
34
 
35
  ## LLM - Using an open-source model from Hugging Face
36
  def get_llm():
@@ -41,6 +27,9 @@ def get_llm():
41
  repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
42
  llm = HuggingFaceHub(
43
  repo_id=repo_id,
 
 
 
44
  model_kwargs={"temperature": 0.1, "max_length": 512}
45
  )
46
  return llm
 
1
  import gradio as gr
 
2
  from langchain_community.document_loaders import PyPDFLoader
3
  from langchain_community.vectorstores import FAISS
4
  from langchain.text_splitter import RecursiveCharacterTextSplitter
 
13
  warnings.warn = warn
14
  warnings.filterwarnings('ignore')
15
 
16
+ # Set HUGGINGFACEHUB_API_TOKEN as an environment variable or Hugging Face Space secret.
17
+ # Never hardcode a real token in source — anything committed here is exposed in the repo history.
18
+ import os
19
+ os.environ["HUGGINGFACEHUB_API_TOKEN"] = "hf_YOUR_HUGGINGFACE_TOKEN"
 
 
 
 
 
 
 
 
 
 
 
 
 
20
 
21
  ## LLM - Using an open-source model from Hugging Face
22
  def get_llm():
 
27
  repo_id = "mistralai/Mixtral-8x7B-Instruct-v0.1"
28
  llm = HuggingFaceHub(
29
  repo_id=repo_id,
30
+ # The 'task' parameter is required to specify the model's function.
31
+ # This fixes the 'Value error, Got invalid task None' error.
32
+ task="text-generation",
33
  model_kwargs={"temperature": 0.1, "max_length": 512}
34
  )
35
  return llm