# Import necessary modules
from langchain.prompts import ChatPromptTemplate # type: ignore
from langchain.llms import Ollama # type: ignore
import streamlit as st # type: ignore

# --- Streamlit UI scaffolding ---
st.title("Subbu Chat Bot")
input_txt = st.text_input("Enter your queries here...")

# Dropdown for model selection; the options must stay in sync with the
# MODELS dispatch table below.
model_choice = st.selectbox("Select the model:", ["Llama 3.2", "Llama 3.1", "Code Llama"])

# Prompt template: a fixed system persona plus the user's raw query.
prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a helpful AI assistant. Your name is Subbu Assistant."),
     ("user", "user query: {query}")]
)

# One LLM handle per selectable model (adjust tags to match the models
# actually pulled into the local Ollama server).
# NOTE(review): `langchain.llms.Ollama` is deprecated in newer LangChain
# releases (moved to langchain_community / langchain_ollama) — confirm the
# pinned LangChain version before upgrading.
llm_3_2 = Ollama(model="llama3.2")
llm_3_1 = Ollama(model="llama3.1")
code_llama = Ollama(model="codellama")

# Dispatch table replacing the original if/elif chain: the chain had no
# final else, so `response` would be unbound (NameError at st.write) if a
# selectbox option were ever added without a matching branch.
MODELS = {
    "Llama 3.2": llm_3_2,
    "Llama 3.1": llm_3_1,
    "Code Llama": code_llama,
}

# Process input and display the response.
if input_txt:
    llm = MODELS.get(model_choice)
    if llm is None:
        # Defensive: only reachable if the selectbox options and MODELS drift apart.
        st.error(f"Unknown model choice: {model_choice}")
    else:
        # ChatPromptTemplate.format renders both messages into one string,
        # which the string-in/string-out LLM interface accepts.
        response = llm(prompt.format(query=input_txt))
        st.write(response)