# Gradio Space: a Groq-backed chatbot built with LangChain message types.
import gradio as gr
import os
from langchain_groq import ChatGroq
from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
# 1. Initialize the Chat Model.
# We use the specific Groq integration as described in LangChain's provider docs.
# NOTE(review): the original reads the key from the non-standard "GROQ_API"
# env var; keep that, but fall back to the conventional "GROQ_API_KEY" so a
# standard deployment also works. If neither is set this stays None and the
# underlying client will raise at first call — confirm desired failure mode.
llm = ChatGroq(
    groq_api_key=os.environ.get("GROQ_API") or os.environ.get("GROQ_API_KEY"),
    model_name="llama-3.3-70b-versatile",
    temperature=0.7,  # moderate sampling randomness; 0.0 would be near-deterministic
)
# 2. Define the Chat Function.
def chat_function(message, history):
    """Generate one assistant reply for the Gradio chat UI.

    Args:
        message: The user's latest message (plain string from Gradio).
        history: Prior turns as supplied by ``gr.ChatInterface``. Older Gradio
            versions pass ``(user_text, ai_text)`` pairs; Gradio 5 / the
            ``type="messages"`` format passes ``{"role": ..., "content": ...}``
            dicts. Both are accepted here.

    Returns:
        The model's reply text (``response.content``).
    """
    # The docs recommend building a list of typed messages; a SystemMessage
    # first defines the assistant's behavior.
    conversation_messages = [SystemMessage(content="You are a helpful assistant.")]

    # Reconstruct the conversation from Gradio's history, tolerating both
    # the legacy tuple format and the newer messages (dict) format — the
    # original tuple unpacking would crash on dict entries.
    for entry in history:
        if isinstance(entry, dict):
            role = entry.get("role")
            content = entry.get("content")
            if not content:
                continue
            if role == "user":
                conversation_messages.append(HumanMessage(content=content))
            elif role == "assistant":
                conversation_messages.append(AIMessage(content=content))
        else:
            user_text, ai_text = entry
            if user_text:
                conversation_messages.append(HumanMessage(content=user_text))
            if ai_text:
                conversation_messages.append(AIMessage(content=ai_text))

    # Add the user's latest message, then invoke the model with the full list.
    conversation_messages.append(HumanMessage(content=message))
    response = llm.invoke(conversation_messages)
    # Return only the text content of the AIMessage response.
    return response.content
# 3. Build the chat UI.
# gr.ChatInterface wraps chat_function in a ready-made chat front end;
# it calls the function with (message, history) and renders the return value.
demo = gr.ChatInterface(
    chat_function,
    title="🤖 Groq Chatbot",
    description="A simple chatbot using LangChain's message history structures.",
)
# 4. Start the Gradio server only when run as a script (not when imported).
# (A stray " |" viewer artifact trailing the original launch line is removed.)
if __name__ == "__main__":
    demo.launch()