Spaces:
Build error
Build error
Rename LLM_Evaluation.py to helpers.py
Browse files- LLM_Evaluation.py +0 -0
- helpers.py +34 -0
LLM_Evaluation.py
DELETED
|
File without changes
|
helpers.py
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import os

import requests
from groq import Groq

# Optionally load a .env file during local development:
# from dotenv import load_dotenv; load_dotenv()

# SECURITY FIX: the original file embedded a live Groq API key ("gsk_...")
# directly in source, twice. A key committed to a public repo is compromised
# and must be revoked/rotated. Read it from the environment instead, which
# is what the commented-out dotenv code indicates was intended.
GROQ_API_KEY = os.environ.get("GROQ_API_KEY")

# Shared Groq client used by generate_content().
client = Groq(api_key=GROQ_API_KEY)
def fetch_models():
    """Return the ids of chat-capable models available on the Groq API.

    Queries the Groq ``/openai/v1/models`` endpoint and filters out
    audio-transcription and tool-use-preview models that are not suitable
    for plain chat completion.

    Returns:
        list[str]: available model ids, or ``[]`` when the request fails
        or the response contains no models.
    """
    # Set for O(1) membership tests (original used a list).
    exclude_models = {
        'distil-whisper-large-v3-en',
        'whisper-large-v3',
        'whisper-large-v3-turbo',
        "llama3-groq-8b-8192-tool-use-preview",
        "llama3-groq-70b-8192-tool-use-preview",
    }
    url = "https://api.groq.com/openai/v1/models"
    api_key = GROQ_API_KEY
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    try:
        # Original bug: requests.get(...).json()['data'] raised KeyError /
        # JSONDecodeError on any HTTP error payload and hung forever with
        # no timeout. Surface those failures explicitly instead.
        resp = requests.get(url, headers=headers, timeout=10)
        resp.raise_for_status()
        models = resp.json().get('data', [])
    except (requests.RequestException, ValueError) as exc:
        print(f"Error in fetching models: {exc}")
        return []
    if models:
        return [model['id'] for model in models if model['id'] not in exclude_models]
    print("Error in fetching models!")
    return []
# NOTE(review): removed a stray debug statement that printed
# 'selected_model <===> prompt' at import time in the original.

def generate_content(selected_model, prompt):
    """Send *prompt* to *selected_model* via the Groq chat-completions API.

    Args:
        selected_model: id of the Groq model to use (e.g. one returned by
            ``fetch_models()``).
        prompt: user message text sent as a single "user" role message.

    Returns:
        str: the text content of the model's first reply choice.
    """
    chat_completion = client.chat.completions.create(
        messages=[{"role": "user", "content": prompt}],
        model=selected_model,
    )
    return chat_completion.choices[0].message.content