msmaje committed on
Commit
10baa36
·
verified ·
1 Parent(s): 25a1eaf

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +41 -22
app.py CHANGED
@@ -17,12 +17,16 @@ try:
17
  from langchain_community.vectorstores import FAISS
18
  from langchain.prompts import PromptTemplate
19
  from langchain.chains import RetrievalQA
20
- # This is the key change: Import HuggingFaceHub instead of HuggingFaceEndpoint
21
- from langchain_community.llms import HuggingFaceHub
22
  LANGCHAIN_AVAILABLE = True
23
  except ImportError as e:
24
- logger.error(f"LangChain import error: {e}")
25
- LANGCHAIN_AVAILABLE = False
 
 
 
 
 
26
 
27
  # Create PDFs folder if it doesn't exist
28
  PDF_FOLDER_PATH = "./pdfs"
@@ -63,23 +67,38 @@ def create_llm():
63
  hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
64
 
65
  try:
66
- # The crucial change: Use HuggingFaceHub directly as suggested
67
- # Note: You need to specify a repo_id that is a text generation model.
68
- # "mistralai/Mistral-7B-Instruct-v0.2" is a good choice for instruction following.
69
- llm = HuggingFaceHub(
70
- repo_id="mistralai/Mistral-7B-Instruct-v0.2", # Using the suggested model
71
- huggingfacehub_api_token=hf_token,
72
- model_kwargs={
73
- "temperature": 0.7,
74
- "max_length": 512, # Note: max_new_tokens is typically preferred for generation length
75
- "do_sample": True,
76
- "top_p": 0.9,
77
- "top_k": 50
78
- }
79
- )
80
- logger.info(f"Successfully initialized LLM with model: mistralai/Mistral-7B-Instruct-v0.2")
81
- return llm
82
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
83
  except Exception as e:
84
  logger.error(f"LLM creation error: {e}")
85
  # Return a simple fallback that doesn't use HuggingFace API
@@ -529,7 +548,7 @@ def create_interface():
529
  if PRELOADED_PDFS:
530
  gr.Markdown("""
531
  <div style="background: linear-gradient(90deg, #10b981, #059669);
532
- color: white; padding: 12px; border-radius: 8px; margin: 10px 0;">
533
  🎉 <strong>Pre-loaded PDFs detected!</strong> Use the 'Load Pre-existing PDFs' button to get started quickly.
534
  </div>
535
  """)
 
17
  from langchain_community.vectorstores import FAISS
18
  from langchain.prompts import PromptTemplate
19
  from langchain.chains import RetrievalQA
20
+ from langchain_huggingface import HuggingFaceEndpoint # Updated import
 
21
  LANGCHAIN_AVAILABLE = True
22
  except ImportError as e:
23
+ try:
24
+ # Fallback to older import structure
25
+ from langchain_community.llms import HuggingFaceEndpoint
26
+ LANGCHAIN_AVAILABLE = True
27
+ except ImportError as e2:
28
+ logger.error(f"LangChain import error: {e}, {e2}")
29
+ LANGCHAIN_AVAILABLE = False
30
 
31
  # Create PDFs folder if it doesn't exist
32
  PDF_FOLDER_PATH = "./pdfs"
 
67
  hf_token = os.getenv("HUGGINGFACEHUB_API_TOKEN")
68
 
69
  try:
70
+ # Try different model configurations
71
+ models_to_try = [
72
+ "microsoft/DialoGPT-medium",
73
+ "google/flan-t5-base",
74
+ "microsoft/DialoGPT-small",
75
+ "tiiuae/falcon-7b-instruct"
76
+ ]
77
+
78
+ for model_id in models_to_try:
79
+ try:
80
+ llm = HuggingFaceEndpoint(
81
+ repo_id=model_id,
82
+ temperature=0.7,
83
+ max_new_tokens=512,
84
+ huggingfacehub_api_token=hf_token,
85
+ model_kwargs={
86
+ "max_length": 512,
87
+ "do_sample": True,
88
+ "temperature": 0.7,
89
+ "top_p": 0.9,
90
+ "top_k": 50
91
+ }
92
+ )
93
+ logger.info(f"Successfully initialized LLM with model: {model_id}")
94
+ return llm
95
+ except Exception as model_error:
96
+ logger.warning(f"Failed to initialize {model_id}: {model_error}")
97
+ continue
98
+
99
+ # If all models fail, raise the last error
100
+ raise Exception("All model initialization attempts failed")
101
+
102
  except Exception as e:
103
  logger.error(f"LLM creation error: {e}")
104
  # Return a simple fallback that doesn't use HuggingFace API
 
548
  if PRELOADED_PDFS:
549
  gr.Markdown("""
550
  <div style="background: linear-gradient(90deg, #10b981, #059669);
551
+ color: white; padding: 12px; border-radius: 8px; margin: 10px 0;">
552
  🎉 <strong>Pre-loaded PDFs detected!</strong> Use the 'Load Pre-existing PDFs' button to get started quickly.
553
  </div>
554
  """)