# NOTE: "Spaces: Sleeping" — residue from the Hugging Face Spaces page this code
# was copied from; kept here as a comment so the file is valid Python.
import os

import gradio as gr
from groq import Groq

# Groq API client — the key is read from the GROQ_KEY environment variable.
client = Groq(api_key=os.environ.get("GROQ_KEY"))
def get_response_from_llama(prompt):
    """Send a single-turn *prompt* to the LLaMA model via Groq and return the reply text."""
    completion = client.chat.completions.create(
        model="llama3-8b-8192",
        messages=[{"role": "user", "content": prompt}],
    )
    # First (and only) choice holds the assistant's message.
    return completion.choices[0].message.content
# Assemble the Gradio chat interface.
with gr.Blocks() as demo:
    gr.Markdown("# 🦙 LLaMA Chatbot")
    gr.Markdown("### Enter a prompt below to chat with the LLaMA model!")

    # Conversation history display.
    chatbot = gr.Chatbot()

    # Input row: wide textbox next to a narrow Send button.
    with gr.Row():
        with gr.Column(scale=10):
            txt = gr.Textbox(show_label=False, placeholder="Type your message...")
        with gr.Column(scale=1, min_width=100):
            send_btn = gr.Button("Send")

    def user(message, history):
        """Append the user's message with a pending reply slot and clear the textbox."""
        return "", history + [[message, None]]

    def bot(history):
        """Fill in the model's reply for the most recent user turn."""
        history[-1][1] = get_response_from_llama(history[-1][0])
        return history

    # Both pressing Enter in the textbox and clicking Send run the same
    # two-step flow: record the user turn, then stream in the bot reply.
    txt.submit(user, [txt, chatbot], [txt, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    send_btn.click(user, [txt, chatbot], [txt, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )

# share=True exposes a public Gradio link in addition to the local server.
demo.launch(share=True)