Spaces:
Sleeping
Sleeping
## Imports and environment setup: load secrets from .env, enable LangSmith tracing.
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
import streamlit as st
import os
from dotenv import load_dotenv

load_dotenv()

## Langsmith Tracking
# os.environ[key] = None raises TypeError, so only set keys that are
# actually present in the environment / .env file.
langchain_api_key = os.getenv('LANGCHAIN_API_KEY')
if langchain_api_key:
    os.environ['LANGCHAIN_API_KEY'] = langchain_api_key
os.environ['LANGCHAIN_TRACING_V2'] = 'true'
# Project label corrected: this app talks to Groq, not OpenAI.
os.environ['LANGCHAIN_PROJECT'] = "Simple Q&A Chatbot With Groq"
groq_api_key = os.getenv('GROQ_API_KEY')
if groq_api_key:
    os.environ['GROQ_API_KEY'] = groq_api_key
## Prompt Template
# NOTE(review): "propmt" is a typo for "prompt"; the name is kept because
# get_response below references this module-level variable.
_chat_messages = [
    ("system", "You are an helpful AI assistant. Please answer to the questions to the best of your ability."),
    ("human", "question:{question}"),
]
propmt = ChatPromptTemplate(_chat_messages)
def get_response(question, model):
    """Answer *question* via the prompt -> Groq LLM -> string-parser chain.

    Args:
        question: The user's question text, substituted into the prompt's
            ``{question}`` slot.
        model: Groq model identifier string (e.g. ``"llama-3.1-8b-instant"``).

    Returns:
        The model's answer as a plain string (via ``StrOutputParser``).
    """
    # Bind the client to a new name instead of rebinding the ``model``
    # parameter, which previously shadowed the model-name string.
    llm = ChatGroq(model=model)
    output_parser = StrOutputParser()
    # ``propmt`` (sic) is the module-level ChatPromptTemplate defined above.
    chain = propmt | llm | output_parser
    return chain.invoke({"question": question})
## Streamlit UI: model picker, question input, and answer display.
st.title("Simple Q&A Chatbot With GROQ")
# Groq model IDs are lowercase; "llama-3.1-70b-Versatile" was mis-cased
# and would not match the API's model identifier.
model = st.selectbox(
    "Select a model",
    ["llama-3.1-8b-instant", "gemma2-9b-it", "llama-3.1-70b-versatile"],
)
st.write("This is a simple Q&A chatbot that uses GROQ to answer questions. Ask any question and the chatbot will try to answer it.")
input_question = st.text_input("Ask a question")
if input_question:
    # Only call the LLM once the user has typed something.
    response = get_response(input_question, model)
    st.write(response)
else:
    st.write("Please ask a question")