aartstudio commited on
Commit
4d342a7
·
verified ·
1 Parent(s): 7aed3bc

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +94 -0
  2. requirements.txt +2 -0
app.py ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# -*- coding: utf-8 -*-
"""app.ipynb

Automatically generated by Colab.

Original file is located at
    https://colab.research.google.com/drive/1g3WXzYMdAfSHH2i3i1Ezy2DQUYYifFJI
"""

# NOTE(review): the original notebook cell ran `!pip install groq gradio` here.
# That is IPython shell magic, which is a SyntaxError in a plain .py file, so it
# is removed; the dependencies are declared in requirements.txt instead.

import os

import gradio as gr
from groq import Groq

# ---------- SET YOUR API KEY ----------
# Option 1: Set as environment variable before running:
#   export GROQ_API_KEY="your_api_key_here"
# Option 2: Put it directly here (less safe):
#   os.environ["GROQ_API_KEY"] = "your_api_key_here"

# The 3 lines below are for Colab; keep them commented if you are using HF.
# from google.colab import userdata
# GROQ_API_KEY = userdata.get('GROQ_API_KEY')
# client = Groq(api_key=GROQ_API_KEY)

# The line below is for HF Spaces; comment it out if you are using Colab.
client = Groq(api_key=os.environ.get("GROQ_API_KEY"))

# Choose a Llama model available on Groq, e.g.:
MODEL_NAME = "llama-3.3-70b-versatile"  # check the Groq console for latest names
+
33
+
34
def chat_with_groq(message, history):
    """Generate an assistant reply for *message* via the Groq chat API.

    Args:
        message: latest user input (string).
        history: list of [user, assistant] pairs as kept by Gradio; the
            assistant slot may be None for a not-yet-answered turn.

    Returns:
        The assistant's reply text (string).
    """
    # Flatten the Gradio pair-history into Groq-style role/content dicts,
    # then append the newest user turn at the end.
    conversation = []
    for past_user, past_bot in history:
        conversation.append({"role": "user", "content": past_user})
        if past_bot is not None:
            conversation.append({"role": "assistant", "content": past_bot})
    conversation.append({"role": "user", "content": message})

    # Call the Groq chat-completion endpoint with the assembled transcript.
    completion = client.chat.completions.create(
        model=MODEL_NAME,
        messages=conversation,
        temperature=0.7,
        max_tokens=512,
    )
    return completion.choices[0].message.content
61
+
62
+
63
# ---------- GRADIO UI ----------
with gr.Blocks() as demo:
    gr.Markdown("# 💬 Groq Llama Chatbot")
    gr.Markdown(
        "Chat with a Llama model served through the Groq API. "
        "Make sure you set your `GROQ_API_KEY` before running."
    )

    chatbot = gr.Chatbot(height=400)
    msg = gr.Textbox(label="Type your message here")
    clear = gr.Button("Clear")

    def user_send(user_message, chat_history):
        # Stage the user's turn with a None placeholder reply; the textbox is
        # cleared by returning "". bot_reply fills the placeholder in next.
        return "", chat_history + [[user_message, None]]

    def bot_reply(chat_history):
        # The pending turn is the final pair; everything before it is context.
        latest = chat_history[-1][0]
        chat_history[-1][1] = chat_with_groq(latest, chat_history[:-1])
        return chat_history

    # Submitting the textbox first records the user turn, then generates the
    # assistant turn; the Clear button resets the chat display.
    msg.submit(user_send, [msg, chatbot], [msg, chatbot]).then(
        bot_reply, [chatbot], [chatbot]
    )
    clear.click(lambda: None, None, chatbot, queue=False)

# launch app
if __name__ == "__main__":
    demo.launch()
94
+
requirements.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ groq
2
+ gradio