pvanand committed on
Commit
ea680b0
·
verified ·
1 Parent(s): eeab01b

Update main.py

Browse files
Files changed (1) hide show
  1. main.py +12 -13
main.py CHANGED
@@ -23,19 +23,18 @@ SysPromptJson = "You are now in the role of an expert AI who can extract structu
23
  SysPromptMd = "You are an expert AI who can create a structured report using information provided in the context from user request.The report should be in markdown format consists of markdown tables structured into subtopics. Do not add any additional comments."
24
  SysPromptMdOffline = "You are an expert AI who can create a structured report using your knowledge on user request.The report should be in markdown format consists of markdown tables/lists/paragraphs as needed, structured into subtopics. Do not add any additional comments."
25
 
26
- @retry(tries=3, delay=1)
27
- def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPrompt = SysPromptDefault,temperature=0.2):
28
- base_url_groq = "https://api.groq.com/openai/v1"
29
- groq_model_name="llama3-8b-8192"
30
- client = OpenAI(base_url= base_url_groq, api_key= GROQ_API_KEY)
31
- messages=[{"role": "system", "content": SysPrompt},{"role": "user", "content": message}]
32
-
33
- response = client.chat.completions.create(
34
- model=groq_model_name,
35
- messages=messages,
36
- temperature=temperature,
37
- )
38
- return response.choices[0].message.content
39
 
40
  def json_from_text(text):
41
  """
 
23
  SysPromptMd = "You are an expert AI who can create a structured report using information provided in the context from user request.The report should be in markdown format consists of markdown tables structured into subtopics. Do not add any additional comments."
24
  SysPromptMdOffline = "You are an expert AI who can create a structured report using your knowledge on user request.The report should be in markdown format consists of markdown tables/lists/paragraphs as needed, structured into subtopics. Do not add any additional comments."
25
 
26
+
27
+ def together_response(message, model = "meta-llama/Llama-3-8b-chat-hf", SysPrompt = SysPromptDefault, temperature=0.2):
28
+ client = Together(api_key=TOGETHER_API_KEY)
29
+
30
+ messages=[{"role": "system", "content": SysPrompt},{"role": "user", "content": message}]
31
+
32
+ response = client.chat.completions.create(
33
+ model=model,
34
+ messages=messages,
35
+ temperature=temperature,
36
+ )
37
+ return response.choices[0].message.content
 
38
 
39
  def json_from_text(text):
40
  """