Yashvj123 committed on
Commit
bb173e2
·
verified ·
1 Parent(s): 7082148

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -5
app.py CHANGED
@@ -9,25 +9,45 @@ from langchain_core.messages import AIMessage, HumanMessage, SystemMessage
9
  os.environ["HUGGINGFACEHUB_API_KEY"]= os.getenv("HUGGINGFACEHUB_API_KEY")
10
  os.environ["HF_TOKEN"]= os.getenv("HUGGINGFACEHUB_API_KEY")
11
 
12
- # Define Hugging Face model
13
  model = HuggingFaceEndpoint(
14
- repo_id="meta-llama/Llama-3.2-3B-Instruct",
15
  provider="nebius",
16
  temperature=0.6,
17
- max_new_tokens=300, # Increase for more complete outputs
18
  task="conversational"
19
  )
20
 
21
  # Wrap it into ChatHuggingFace interface
22
  llama_model = ChatHuggingFace(
23
  llm=model,
24
- repo_id="meta-llama/Llama-3.2-3B-Instruct",
25
  provider="nebius",
26
  temperature=0.6,
27
- max_new_tokens=300,
28
  task="conversational"
29
  )
30
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  # Initialize session message history
32
  if "message_history" not in st.session_state:
33
  st.session_state.message_history = [
 
9
  os.environ["HUGGINGFACEHUB_API_KEY"]= os.getenv("HUGGINGFACEHUB_API_KEY")
10
  os.environ["HF_TOKEN"]= os.getenv("HUGGINGFACEHUB_API_KEY")
11
 
12
+
13
  model = HuggingFaceEndpoint(
14
+ repo_id="deepseek-ai/DeepSeek-R1",
15
  provider="nebius",
16
  temperature=0.6,
17
+ max_new_tokens=200,
18
  task="conversational"
19
  )
20
 
21
  # Wrap it into ChatHuggingFace interface
22
  llama_model = ChatHuggingFace(
23
  llm=model,
24
+ repo_id="deepseek-ai/DeepSeek-R1",
25
  provider="nebius",
26
  temperature=0.6,
27
+ max_new_tokens=200,
28
  task="conversational"
29
  )
30
 
31
+
32
+ # Define Hugging Face model
33
+ # model = HuggingFaceEndpoint(
34
+ # repo_id="meta-llama/Llama-3.2-3B-Instruct",
35
+ # provider="nebius",
36
+ # temperature=0.6,
37
+ # max_new_tokens=300, # Increase for more complete outputs
38
+ # task="conversational"
39
+ # )
40
+
41
+ # # Wrap it into ChatHuggingFace interface
42
+ # llama_model = ChatHuggingFace(
43
+ # llm=model,
44
+ # repo_id="meta-llama/Llama-3.2-3B-Instruct",
45
+ # provider="nebius",
46
+ # temperature=0.6,
47
+ # max_new_tokens=300,
48
+ # task="conversational"
49
+ # )
50
+
51
  # Initialize session message history
52
  if "message_history" not in st.session_state:
53
  st.session_state.message_history = [