Spaces:
Sleeping
Sleeping
import os

import streamlit as st
from dotenv import load_dotenv
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_groq import ChatGroq
from langchain_openai import ChatOpenAI

# Pull variables from a local .env file into the process environment.
load_dotenv()

# Enable LangSmith tracing for observability of chain runs.
os.environ['LANGCHAIN_TRACING_V2'] = "true"

# Bug fix: `os.environ[key] = None` raises TypeError when the variable is
# absent, so only propagate the key when it actually exists.
_langchain_api_key = os.getenv("LANGCHAIN_API_KEY")
if _langchain_api_key:
    os.environ['LANGCHAIN_API_KEY'] = _langchain_api_key
def get_llm_response(llm_choice, input_text):
    """Run the user's question through the selected chat model.

    Args:
        llm_choice: "OpenAI" routes to the OpenRouter-backed ChatOpenAI
            client; any other value falls back to ChatGroq.
        input_text: The user's question; falsy values short-circuit.

    Returns:
        The model's reply as a plain string, or None when input_text
        is empty (matching the original behavior for no input).
    """
    # Guard first: avoid constructing an LLM client (and reading API keys)
    # when there is no question to answer.
    if not input_text:
        return None

    if llm_choice == "OpenAI":
        # NOTE(review): despite the "OpenAI" label this calls OpenRouter and
        # serves a free Mistral model — confirm the label is intentional.
        llm = ChatOpenAI(
            temperature=0.5,
            model="mistralai/mistral-7b-instruct:free",
            base_url="https://openrouter.ai/api/v1",
            api_key=os.getenv("OPENAI_API_KEY"),
        )
    else:
        llm = ChatGroq(
            groq_api_key=os.getenv("GROQ_API_KEY"),
            model_name="mixtral-8x7b-32768",
        )

    # `prompt` is the module-level ChatPromptTemplate defined below; the
    # pipe operator builds a prompt -> model -> string-parser runnable.
    chain = prompt | llm | StrOutputParser()
    return chain.invoke({"question": input_text})
# Shared prompt used by get_llm_response: a fixed system instruction plus the
# user's question injected through the {question} template variable.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant. Please respond to the queries"),
        ("user", "Question: {question}")
    ]
)
# --- Streamlit UI ----------------------------------------------------------
st.title("Chat with OpenAI and ChatGroq")
st.caption("Made By - Samagra Shrivastava with ♥")

# The model picker lives in the sidebar; the chat box is pinned to the
# bottom of the main pane regardless of call order.
available_models = ['OpenAI', 'ChatGroq']
with st.sidebar:
    st.title("Select the model of your choice")
    selected_model = st.selectbox("Choose LLM of your choice", available_models)

question = st.chat_input("Enter your question here..")

# get_llm_response returns None until the user submits a question, so the
# response section renders only after a real answer comes back.
answer = get_llm_response(llm_choice=selected_model, input_text=question)
if answer:
    st.write(f"**Response from {selected_model}:**")
    st.write(answer)