shlokamhaisekar committed on
Commit
6a3d2df
·
verified ·
1 Parent(s): f709175

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -4
app.py CHANGED
@@ -1,11 +1,21 @@
1
  from huggingface_hub import InferenceClient
2
-
 
 
3
  import gradio as gr
4
  import random
5
 
6
- client = InferenceClient("Qwen/Qwen2.5-72B-Instruct")
7
  #deepseek-ai/DeepSeek-R1-Distill-Qwen-32B
8
 
 
 
 
 
 
 
 
 
9
  def respond(message, history):
10
  #responses = ["Yes", "No"]
11
  #return random.choice(responses)
@@ -22,10 +32,10 @@ def respond(message, history):
22
 
23
  messages.append(
24
  {"role":"user",
25
- "content": message}
26
  )
27
 
28
- response = client.chat_completion(messages, max_tokens = 100, temperature = 1.3, top_p = .3)
29
  #temperature and top_p control randomness
30
 
31
 
 
1
  from huggingface_hub import InferenceClient
2
+ #STEP1FROMSEMANTICSEARCH (import libraries)
3
+ from sentence_transformers import SentenceTransformer
4
+ import torch
5
  import gradio as gr
6
  import random
7
 
8
+ client=InferenceClient("Qwen/Qwen2.5-72B-Instruct")
9
  #deepseek-ai/DeepSeek-R1-Distill-Qwen-32B
10
 
11
+ # Open the recipes.txt file in read mode with UTF-8 encoding - step 2 from semantic search
12
+ with open("recipes.txt", "r", encoding="utf-8") as file:
13
+ # Read the entire contents of the file and store it in a variable
14
+ recipes_text = file.read()
15
+
16
+ # Print the text below
17
+ print(recipes_text)
18
+
19
  def respond(message, history):
20
  #responses = ["Yes", "No"]
21
  #return random.choice(responses)
 
32
 
33
  messages.append(
34
  {"role":"user",
35
+ "content": "message"}
36
  )
37
 
38
+ response = client.chat_completion(messages, max_tokens=100, temperature=1.3, top_p=.2)
39
  #temperature and top_p control randomness
40
 
41