File size: 1,630 Bytes
918b7b6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
from langchain_core.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
from langchain_groq import ChatGroq
import streamlit as st
from langchain_core.output_parsers import StrOutputParser
import os
from dotenv import load_dotenv

load_dotenv()

# Enable LangSmith tracing. os.environ values must be strings, so only set
# the API key when it is actually present: os.getenv() returns None for a
# missing variable, and assigning None into os.environ raises TypeError.
os.environ['LANGCHAIN_TRACING_V2'] = "true"
_langchain_api_key = os.getenv("LANGCHAIN_API_KEY")
if _langchain_api_key:
    os.environ['LANGCHAIN_API_KEY'] = _langchain_api_key


def get_llm_response(llm_choice, input_text):
    """Run *input_text* through the selected LLM and return the reply text.

    Parameters
    ----------
    llm_choice : str
        ``"OpenAI"`` selects the ChatOpenAI client (which, despite the name,
        is pointed at OpenRouter's free Mistral endpoint — see NOTE below);
        any other value selects Groq's Mixtral model.
    input_text : str | None
        The user's question. Falsy values (None / empty string) short-circuit.

    Returns
    -------
    str | None
        The model's answer, or ``None`` when there is no input.
    """
    # Bail out BEFORE constructing an LLM client: st.chat_input yields None
    # on every Streamlit rerun until the user submits, so this is the common
    # path and building a client for it is pure waste.
    if not input_text:
        return None

    if llm_choice == "OpenAI":
        # NOTE(review): despite the "OpenAI" label this talks to OpenRouter
        # with a free Mistral model, authenticated via OPENAI_API_KEY.
        llm = ChatOpenAI(
            temperature=0.5,
            model="mistralai/mistral-7b-instruct:free",
            base_url="https://openrouter.ai/api/v1",
            api_key=os.getenv("OPENAI_API_KEY"),
        )
    else:
        llm = ChatGroq(
            groq_api_key=os.getenv("GROQ_API_KEY"),
            model_name="mixtral-8x7b-32768",
        )

    # `prompt` is the module-level ChatPromptTemplate defined below.
    chain = prompt | llm | StrOutputParser()
    return chain.invoke({"question": input_text})

# Shared prompt template used by get_llm_response: a fixed system instruction
# plus the user's question, filled in via the {question} placeholder.
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", "You are a helpful assistant. Please respond to the queries"),
        ("user", "Question: {question}")
    ]
)

# --- Streamlit UI ----------------------------------------------------------
st.title("Chat with OpenAI and ChatGroq")
st.caption("Made By - Samagra Shrivastava with ♥")

# chat_input returns None on every rerun until the user submits a message.
input_text = st.chat_input("Enter your question here..")

llm_options = ['OpenAI', 'ChatGroq']
with st.sidebar:
    st.title("Select the model of your choice")
    llm_choice = st.selectbox("Choose LLM of your choice", llm_options)

# Only invoke the LLM when the user actually submitted a question, instead of
# relying solely on the callee's internal guard on every rerun.
response = get_llm_response(llm_choice=llm_choice, input_text=input_text) if input_text else None

if response:
    st.write(f"**Response from {llm_choice}:**")
    st.write(response)