# Hugging Face Space source capture (file size: 1,416 bytes, commits 353997e / 5de60f1).
# NOTE: the Space's status read "Runtime error" at the time this file was captured.
import gradio as gr
from langchain.llms import HuggingFacePipeline
from langchain.chains import ConversationChain
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
# --- Model / chain setup ------------------------------------------------
# Load the pre-trained causal language model and its tokenizer from the
# Hugging Face Hub.
# NOTE(review): zephyr-7b-beta is a 7B-parameter model; loading it without
# sufficient RAM/GPU is a plausible cause of the Space's reported runtime
# error — confirm the hardware tier before deploying.
model_name = "HuggingFaceH4/zephyr-7b-beta" # You can replace this with other conversational models
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)
# Wrap model + tokenizer in a transformers text-generation pipeline.
hf_pipeline = pipeline("text-generation", model=model, tokenizer=tokenizer)
# Adapt the pipeline to LangChain's LLM interface so chains can drive it.
llm = HuggingFacePipeline(pipeline=hf_pipeline)
# Buffer memory keeps the full dialogue transcript, so every turn is
# re-prompted with the prior context.
from langchain.memory import ConversationBufferMemory
memory = ConversationBufferMemory()
conversation = ConversationChain(llm=llm, memory=memory)
# Define a function for Gradio to handle conversation
def chatbot(user_input):
    """Return the model's reply to *user_input* via the conversation chain.

    The chain's buffer memory accumulates the dialogue across calls, so
    each invocation sees the full prior transcript.
    """
    return conversation.run(user_input)
# Gradio UI
with gr.Blocks() as demo:
    gr.Markdown("## 🤖 Chatbot with Hugging Face and LangChain")
    chatbot_interface = gr.Chatbot()
    user_input = gr.Textbox(label="Type your message:", placeholder="Say something...")
    submit_button = gr.Button("Send")

    def _respond(message, history):
        """Append the new (user, bot) exchange to the chat history.

        BUG FIX: the original handler returned a bare string straight into
        the gr.Chatbot component, but gr.Chatbot requires a list of
        (user, bot) message pairs — submitting a message raised at runtime.
        This wrapper keeps the history in the correct shape and also clears
        the textbox after each send.
        """
        history = (history or []) + [(message, chatbot(message))]
        return history, ""

    # Bind the handler: read the textbox and current history, write back
    # the updated history and an empty textbox.
    submit_button.click(
        _respond,
        inputs=[user_input, chatbot_interface],
        outputs=[chatbot_interface, user_input],
    )
# Launch the app
demo.launch()