| import streamlit as st |
| import os |
| import pandas as pd |
| from together import Together |
|
|
|
|
# Module-level Together API client, authenticated from the environment.
# NOTE(review): reads TOGETHER_API_KEY eagerly at import time — a missing
# variable raises KeyError before the app starts. Confirm that fail-fast
# behavior is intended.
client = Together(api_key=os.environ["TOGETHER_API_KEY"])
|
|
|
|
def call_llama(
    prompt: str,
    model: str = "meta-llama/Llama-3-8b-chat-hf",
    temperature: float = 0.7,
) -> str:
    """Send a single-turn chat prompt to a Llama model and return its reply.

    The previously hard-coded model name and sampling temperature are now
    keyword parameters with the original values as defaults, so existing
    callers are unaffected while new callers can override them.

    Args:
        prompt: The user message to send to the model.
        model: Together model identifier to query.
        temperature: Sampling temperature passed through to the API.

    Returns:
        The text content of the first choice in the model's response.
    """
    # Single-message conversation: the whole prompt goes in as one user turn.
    response = client.chat.completions.create(
        model=model,
        messages=[{"role": "user", "content": prompt}],
        temperature=temperature,
    )
    # Only the first candidate is used; the API may return several choices.
    return response.choices[0].message.content
|
|