AIPretender committed on
Commit
b77baa1
·
verified ·
1 Parent(s): 388b0c4

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -5
app.py CHANGED
@@ -2,15 +2,27 @@ from langchain_community.document_loaders import PyPDFLoader
2
  import gradio as gr
3
  from langchain.chains.summarize import load_summarize_chain
4
  from huggingface_hub import InferenceClient
5
- from langchain_huggingface import HuggingFaceHub
6
  import os
7
  # Set your Hugging Face token securely
8
- os.environ["HUGGINGFACEHUB_API_TOKEN"] = ""
9
 
10
  # Create the LLM
11
- llm = HuggingFaceHub(
12
- repo_id="facebook/bart-large-cnn", # Summarization-capable model
13
- model_kwargs={"temperature": 0.7, "max_length": 512}
 
 
 
 
 
 
 
 
 
 
 
 
14
  )
15
 
16
 
 
2
  import gradio as gr
3
  from langchain.chains.summarize import load_summarize_chain
4
  from huggingface_hub import InferenceClient
5
+ from langchain_huggingface import HuggingFaceEndpoint
6
  import os
7
  # Set your Hugging Face token securely
8
+ HUGGINGFACEHUB_API_TOKEN = os.environ["HUGGINGFACEHUB_API_TOKEN"]
9
 
10
  # Create the LLM
11
+ #llm = HuggingFaceHub(
12
+ # repo_id="facebook/bart-large-cnn", # Summarization-capable model
13
+ # model_kwargs={"temperature": 0.7, "max_length": 512}
14
+ #)
15
+ repo_id = "deepseek-ai/DeepSeek-R1-0528"
16
+
17
+ llm = HuggingFaceEndpoint(
18
+ repo_id=repo_id,
19
+ max_length=128,
20
+ temperature=0.5,
21
+ huggingfacehub_api_token=HUGGINGFACEHUB_API_TOKEN,
22
+ provider="auto", # set your provider here hf.co/settings/inference-providers
23
+ # provider="hyperbolic",
24
+ # provider="nebius",
25
+ # provider="together",
26
  )
27
 
28