Update app.py
Browse files
app.py
CHANGED
|
@@ -5,7 +5,7 @@ from groq import Groq
|
|
| 5 |
import gradio as gr
|
| 6 |
from groq import Groq
|
| 7 |
|
| 8 |
-
def generate_response(input_text, model, temperature, max_tokens, top_p):
|
| 9 |
client = Groq()
|
| 10 |
|
| 11 |
stream = client.chat.completions.create(
|
|
@@ -31,7 +31,7 @@ def generate_response(input_text, model, temperature, max_tokens, top_p):
|
|
| 31 |
|
| 32 |
# Define the Gradio chat interface
|
| 33 |
additional_inputs = [
|
| 34 |
-
gr.Dropdown(choices=["mixtral-8x7b-32768", …], label="Model"),  [line truncated in page extraction; the complete post-change dropdown appears in the "after" section below]
|
| 35 |
gr.Slider(minimum=0.0, maximum=1.0, step=0.01, label="Temperature"),
|
| 36 |
gr.Slider(minimum=1, maximum=4096, step=1, label="Max Tokens"),
|
| 37 |
gr.Slider(minimum=0.0, maximum=1.0, step=0.01, label="Top P"),
|
|
|
|
| 5 |
import gradio as gr
|
| 6 |
from groq import Groq
|
| 7 |
|
| 8 |
+
def generate_response(prompt, history, model, temperature, max_tokens, top_p):
|
| 9 |
client = Groq()
|
| 10 |
|
| 11 |
stream = client.chat.completions.create(
|
|
|
|
| 31 |
|
| 32 |
# Define the Gradio chat interface
|
| 33 |
additional_inputs = [
|
| 34 |
+
gr.Dropdown(choices=["llama3-70b-8192", "llama3-8b-8192", "mixtral-8x7b-32768", "llama2-70b-4096", "gemma-7b-it"], label="Model"),
|
| 35 |
gr.Slider(minimum=0.0, maximum=1.0, step=0.01, label="Temperature"),
|
| 36 |
gr.Slider(minimum=1, maximum=4096, step=1, label="Max Tokens"),
|
| 37 |
gr.Slider(minimum=0.0, maximum=1.0, step=0.01, label="Top P"),
|