File size: 3,167 Bytes
5ec59b0
 
 
 
 
 
 
0709a33
 
 
 
5ec59b0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77da7bd
5ec59b0
 
 
 
 
 
 
 
77da7bd
5ec59b0
 
 
 
 
 
 
 
 
 
 
 
77da7bd
5ec59b0
 
 
 
 
 
 
82c88a8
f131676
5ec59b0
0709a33
5ec59b0
328c6ef
77da7bd
 
 
 
 
328c6ef
5ec59b0
 
328c6ef
0709a33
5ec59b0
 
 
 
 
 
 
 
 
 
 
 
77da7bd
5ec59b0
 
 
 
 
0709a33
77da7bd
0709a33
 
5ec59b0
 
0709a33
 
 
 
5ec59b0
 
0709a33
5ec59b0
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
import os
import gradio as gr
from langchain_groq import ChatGroq
from langchain.memory import ConversationBufferMemory
from langchain.chains import ConversationChain
from langchain.prompts import PromptTemplate

MODEL_NAME = "llama-3.3-70b-versatile"
DEFAULT_API_KEY = os.getenv("GROQ_API_KEY", "")

def initialize_chatbot(api_key, model_name=MODEL_NAME):
    """Build a ConversationChain backed by a Groq-hosted chat model.

    Args:
        api_key: Groq API key used to authenticate the model client.
        model_name: Groq model identifier; defaults to module-level MODEL_NAME.

    Returns:
        A ConversationChain with buffered in-memory history and a simple
        chat prompt exposing ``history`` and ``input`` variables.
    """
    # Prompt template — byte-identical to the original wording; the chain
    # fills {history} from memory and {input} from the caller.
    chat_prompt = PromptTemplate(
        template=(
            "You are a helpful AI assistant. Have a natural conversation with the user.\n"
            "\n"
            "Current conversation:\n"
            "{history}\n"
            "Human: {input}\n"
            "AI Assistant:"
        ),
        input_variables=["history", "input"],
    )

    groq_llm = ChatGroq(
        groq_api_key=api_key,
        model_name=model_name,
        temperature=0.7,
        max_tokens=1024,
    )

    # memory_key must match the {history} placeholder in the prompt above.
    history_buffer = ConversationBufferMemory(
        memory_key="history",
        return_messages=True,
    )

    return ConversationChain(
        llm=groq_llm,
        memory=history_buffer,
        prompt=chat_prompt,
        verbose=False,
    )

# Module-level singleton chain: lazily created on first chat and shared by
# every request in this process (NOTE(review): this means all users share one
# conversation history — confirm single-user deployment is intended).
conversation_chain = None

def chat_function(message, api_key):
    """Route one user message through the shared conversation chain.

    Lazily creates the module-level chain on first use. Failures (either
    during initialization or prediction) are reported back as plain
    strings instead of raised, so the UI layer never sees an exception.

    Args:
        message: The user's chat message.
        api_key: Groq API key; an empty/None value short-circuits with a
            prompt to supply one.

    Returns:
        The assistant's reply, or an error/status string.
    """
    global conversation_chain

    # Guard clause: nothing works without a key.
    if not api_key:
        return "Please provide a Groq API key to start chatting."

    # Lazy one-time initialization of the shared chain.
    try:
        if conversation_chain is None:
            conversation_chain = initialize_chatbot(api_key)
    except Exception as e:
        return f"Error initializing chatbot: {str(e)}"

    try:
        reply = conversation_chain.predict(input=message)
    except Exception as e:
        return f"Error: {str(e)}"
    return reply

def reset_conversation():
    """Discard the shared chain so the next message starts a fresh session."""
    global conversation_chain
    conversation_chain = None

with gr.Blocks(title="LLM based Chatbot") as demo:
    gr.Markdown("# 🤖 LLM based Chatbot")
    gr.Markdown("Chat with an AI assistant powered by LangChain and Groq")
    gr.Markdown(f"**Model:** `{MODEL_NAME}`")

    # Show the key field only when no key came from the environment;
    # otherwise carry the env key through a hidden component so the
    # event handlers still receive it.
    if not DEFAULT_API_KEY:
        api_key_input = gr.Textbox(
            label="Groq API Key",
            placeholder="Enter your Groq API key here...",
            type="password"
        )
    else:
        api_key_input = gr.Textbox(
            type="password",
            value=DEFAULT_API_KEY,
            visible=False
        )

    # FIX: respond() appends OpenAI-style {"role", "content"} dicts, which
    # requires the Chatbot "messages" format. Without type="messages" the
    # component defaults to the tuple format (Gradio 4.x) and rejects or
    # misrenders dict entries.
    chatbot = gr.Chatbot(height=400, type="messages")

    with gr.Row():
        msg = gr.Textbox(
            label="Message",
            placeholder="Type your message here...",
            scale=4
        )
        submit_btn = gr.Button("Send", scale=1)

    clear_btn = gr.Button("Clear Conversation")

    def respond(message, chat_history, api_key):
        """Append the user turn and the bot reply to the history.

        Returns the updated history plus "" to clear the input textbox.
        Blank/whitespace-only messages are ignored.
        """
        if not message.strip():
            return chat_history, ""

        chat_history.append({"role": "user", "content": message})
        bot_message = chat_function(message, api_key)
        chat_history.append({"role": "assistant", "content": bot_message})

        return chat_history, ""

    def clear_chat():
        """Reset the backend chain and empty the visible chat history."""
        reset_conversation()
        return []

    # Enter key and Send button share the same handler.
    msg.submit(respond, [msg, chatbot, api_key_input], [chatbot, msg])
    submit_btn.click(respond, [msg, chatbot, api_key_input], [chatbot, msg])
    clear_btn.click(clear_chat, None, chatbot)

# Start the Gradio server only when executed as a script (not on import).
if __name__ == "__main__":
    demo.launch()