Spaces:
Sleeping
Sleeping
| import gradio as gr | |
| import os | |
| from langchain_groq import ChatGroq | |
| from langchain_core.messages import HumanMessage, AIMessage, SystemMessage | |
# 1. Initialize the Chat Model
# Uses LangChain's dedicated Groq integration. The key is read from the
# GROQ_API environment variable; if unset this resolves to None and the
# client will fail at request time rather than at import time.
_GROQ_MODEL = "llama-3.3-70b-versatile"

llm = ChatGroq(
    groq_api_key=os.getenv("GROQ_API"),
    model_name=_GROQ_MODEL,
    temperature=0.7,
)
# 2. Define the Chat Function
def chat_function(message, history):
    """Generate an assistant reply to *message* given the Gradio chat history.

    Parameters
    ----------
    message : str
        The user's newest message.
    history : list
        Prior turns supplied by ``gr.ChatInterface``. Depending on the
        Gradio version/configuration this is either a list of
        ``(user_text, ai_text)`` pairs (legacy "tuples" format) or a list
        of openai-style ``{"role": ..., "content": ...}`` dicts (the
        "messages" format, the default in recent Gradio). Both are handled.

    Returns
    -------
    str
        The text content of the model's reply.
    """
    # LangChain chat models take a list of typed messages; lead with a
    # SystemMessage to define assistant behavior.
    conversation_messages = [SystemMessage(content="You are a helpful assistant.")]

    # Reconstruct the prior conversation from Gradio's history.
    for turn in history:
        if isinstance(turn, dict):
            # "messages" format: {"role": "user" | "assistant", "content": ...}
            content = turn.get("content")
            if not content:
                continue
            if turn.get("role") == "user":
                conversation_messages.append(HumanMessage(content=content))
            elif turn.get("role") == "assistant":
                conversation_messages.append(AIMessage(content=content))
        else:
            # Legacy "tuples" format: (user_text, ai_text); either side may
            # be empty/None and is skipped.
            user_text, ai_text = turn
            if user_text:
                conversation_messages.append(HumanMessage(content=user_text))
            if ai_text:
                conversation_messages.append(AIMessage(content=ai_text))

    # Append the user's latest message, then invoke the model with the
    # full conversation.
    conversation_messages.append(HumanMessage(content=message))
    response = llm.invoke(conversation_messages)

    # Return just the reply text for ChatInterface to display.
    return response.content
# 3. Launch the Interface
# ChatInterface wires chat_function to a ready-made chat UI.
demo = gr.ChatInterface(
    chat_function,
    title="🤖 Groq Chatbot",
    description="A simple chatbot using LangChain's message history structures.",
)

# Only start the server when run as a script, not when imported.
if __name__ == "__main__":
    demo.launch()