kaushaa committed on
Commit
ce415dc
·
verified ·
1 Parent(s): e1f4d52

Create utils/helper.py

Browse files
Files changed (1) hide show
  1. utils/helper.py +47 -0
utils/helper.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import os
3
+ import pandas as pd
4
+
5
+ os.system("pip install together")
6
+
7
+ from together import Together
8
+
9
+ import os
10
+
11
+ # Set the environment variable (just for testing purposes)
12
+ os.environ["api"] = "fdd863b273943eaa3c7397b49ae6f763a930f8007ff3843382a3fc6127641e66"
13
+
14
+ # Now, the code can access the api key from the environment variable
15
+ client = Together(api_key=os.environ["api"])
16
+
17
+
18
+
19
+
20
+
21
def call_llama(prompt: str) -> str:
    """
    Send a single-turn prompt to the Llama chat model and return its reply.

    Args:
        prompt (str): Text submitted as the user message.

    Returns:
        str: The assistant's reply text from the first completion choice.
    """
    # A single user-role message carrying the caller's prompt verbatim.
    chat_messages = [{"role": "user", "content": prompt}]

    completion = client.chat.completions.create(
        # Llama-3 8B chat-tuned model hosted on Together.
        model="meta-llama/Llama-3-8b-chat-hf",
        messages=chat_messages,
        temperature=0.7,  # moderate sampling randomness
    )

    # The API returns a list of choices; take the first message body.
    return completion.choices[0].message.content