# basic_chatbot / app.py — Gradio app comparing two Mistral instruct models.
# Author: Dhanushlevi (Hugging Face Space; commit 8328420, verified)
import os

import gradio as gr
from langchain_huggingface import HuggingFaceEndpoint
from secrets import HUGGING_FACE_TOKEN # Importing the token from a separate file
# Define your HuggingFace endpoint details
repo_id_1 = "mistralai/Mistral-7B-Instruct-v0.2"
repo_id_2 = "mistralai/Mistral-7B-Instruct-v0.3"
# Initialize the HuggingFace endpoints
llm_1 = HuggingFaceEndpoint(repo_id=repo_id_1, max_length=128, temperature=0.7, token=HUGGING_FACE_TOKEN)
llm_2 = HuggingFaceEndpoint(repo_id=repo_id_2, max_length=128, temperature=0.7, token=HUGGING_FACE_TOKEN)
# Define a function to get responses from both models
def get_combined_response(prompt):
    """Send *prompt* to both endpoints and merge their replies.

    Returns one string with each model's answer under a numbered label.
    """
    answers = [llm.invoke(prompt) for llm in (llm_1, llm_2)]
    return "\n\n".join(
        f"Model {idx} Response: {answer}"
        for idx, answer in enumerate(answers, start=1)
    )
# Create a Gradio interface for the combined function
# Wire the comparison function into a simple text-in / text-out web UI.
iface_combined = gr.Interface(
    fn=get_combined_response,
    inputs="text",
    outputs="text",
    title="Combined Machine Learning Chatbots",
)

# Start the Gradio server.
iface_combined.launch()