# Subbu Chat Bot — Streamlit app that routes user queries to a selected
# Ollama-hosted Llama model through a LangChain prompt template.
# Import necessary modules
from langchain.prompts import ChatPromptTemplate  # type: ignore
from langchain.llms import Ollama  # type: ignore
import streamlit as st  # type: ignore

# --- Streamlit UI setup ---
st.title("Subbu Chat Bot")
input_txt = st.text_input("Enter your queries here...")

# Dropdown for model selection (labels shown to the user).
model_choice = st.selectbox("Select the model:", ["Llama 3.2", "Llama 3.1", "Code Llama"])

# Prompt template: fixed system persona plus the user's query.
prompt = ChatPromptTemplate.from_messages(
    [("system", "You are a helpful AI assistant. Your name is Subbu Assistant."),
     ("user", "user query: {query}")]
)

# Map each UI label to its Ollama model tag. Using a dict removes the
# duplicated if/elif dispatch, and instantiating lazily below means only
# the selected model is constructed — Streamlit re-executes this whole
# script on every interaction, so eager construction of all three models
# per rerun was wasted work.
MODEL_TAGS = {
    "Llama 3.2": "llama3.2",
    "Llama 3.1": "llama3.1",
    "Code Llama": "codellama",
}

# Process input and display the response.
if input_txt:
    # Build only the model the user actually picked.
    llm = Ollama(model=MODEL_TAGS[model_choice])
    response = llm(prompt.format(query=input_txt))
    # Display inside the guard so `response` is never referenced unbound.
    st.write(response)