Yashvj123 committed on
Commit
7194e7c
·
verified ·
1 Parent(s): bb173e2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -24
app.py CHANGED
@@ -10,44 +10,44 @@ os.environ["HUGGINGFACEHUB_API_KEY"]= os.getenv("HUGGINGFACEHUB_API_KEY")
10
  os.environ["HF_TOKEN"]= os.getenv("HUGGINGFACEHUB_API_KEY")
11
 
12
 
13
- model = HuggingFaceEndpoint(
14
- repo_id="deepseek-ai/DeepSeek-R1",
15
- provider="nebius",
16
- temperature=0.6,
17
- max_new_tokens=200,
18
- task="conversational"
19
- )
20
-
21
- # Wrap it into ChatHuggingFace interface
22
- llama_model = ChatHuggingFace(
23
- llm=model,
24
- repo_id="deepseek-ai/DeepSeek-R1",
25
- provider="nebius",
26
- temperature=0.6,
27
- max_new_tokens=200,
28
- task="conversational"
29
- )
30
-
31
-
32
- # Define Hugging Face model
33
  # model = HuggingFaceEndpoint(
34
- # repo_id="meta-llama/Llama-3.2-3B-Instruct",
35
  # provider="nebius",
36
  # temperature=0.6,
37
- # max_new_tokens=300, # Increase for more complete outputs
38
  # task="conversational"
39
  # )
40
 
41
  # # Wrap it into ChatHuggingFace interface
42
  # llama_model = ChatHuggingFace(
43
  # llm=model,
44
- # repo_id="meta-llama/Llama-3.2-3B-Instruct",
45
  # provider="nebius",
46
  # temperature=0.6,
47
- # max_new_tokens=300,
48
  # task="conversational"
49
  # )
50
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
51
  # Initialize session message history
52
  if "message_history" not in st.session_state:
53
  st.session_state.message_history = [
 
10
  os.environ["HF_TOKEN"]= os.getenv("HUGGINGFACEHUB_API_KEY")
11
 
12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
13
  # model = HuggingFaceEndpoint(
14
+ # repo_id="deepseek-ai/DeepSeek-R1",
15
  # provider="nebius",
16
  # temperature=0.6,
17
+ # max_new_tokens=200,
18
  # task="conversational"
19
  # )
20
 
21
  # # Wrap it into ChatHuggingFace interface
22
  # llama_model = ChatHuggingFace(
23
  # llm=model,
24
+ # repo_id="deepseek-ai/DeepSeek-R1",
25
  # provider="nebius",
26
  # temperature=0.6,
27
+ # max_new_tokens=200,
28
  # task="conversational"
29
  # )
30
 
31
+
32
+ # Define Hugging Face model
33
+ model = HuggingFaceEndpoint(
34
+ repo_id="meta-llama/Llama-3.2-3B-Instruct",
35
+ provider="nebius",
36
+ temperature=0.6,
37
+ max_new_tokens=300, # Increase for more complete outputs
38
+ task="conversational"
39
+ )
40
+
41
+ # Wrap it into ChatHuggingFace interface
42
+ llama_model = ChatHuggingFace(
43
+ llm=model,
44
+ repo_id="meta-llama/Llama-3.2-3B-Instruct",
45
+ provider="nebius",
46
+ temperature=0.6,
47
+ max_new_tokens=300,
48
+ task="conversational"
49
+ )
50
+
51
  # Initialize session message history
52
  if "message_history" not in st.session_state:
53
  st.session_state.message_history = [