subramaniyam committed on
Commit
eb7ee60
·
verified ·
1 Parent(s): bfdb808

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -66
app.py CHANGED
@@ -1,77 +1,36 @@
1
- import streamlit as st
2
- from langchain.prompts import ChatPromptTemplate # type: ignore
3
- from langchain_huggingface import HuggingFacePipeline # type: ignore
4
- import json
5
- import os
6
 
7
  # Streamlit setup
8
  st.title("Subbu Chat Bot")
9
-
10
- # Initialize session state for storing conversation history
11
- if "history" not in st.session_state:
12
- st.session_state.history = []
13
-
14
- # User input
15
- input_txt = st.text_input("Please enter your queries here...")
16
 
17
  # Add a dropdown for model selection
18
- model_choice = st.selectbox("Select the model:", ["Meta-Llama-8B"])
19
 
20
  # Define the prompt template
21
- prompt_template = ChatPromptTemplate.from_messages(
22
- [("system", "You are a helpful AI assistant. Your name is Subbu Assistant.")]
23
- )
24
-
25
- # Initialize the model using Hugging Face Transformers
26
- llm_8b = HuggingFacePipeline.from_model_id(
27
- model_id="facebook-llama/Meta-Llama-8B",
28
- task="text-generation",
29
- pipeline_kwargs={"max_new_tokens": 100, "top_k": 50, "temperature": 0.7},
30
- api_token="your_hugging_face_api_token"
31
  )
32
 
33
- # Function to get the response from the assistant
34
- def get_response(model, input_text):
35
- prompt = prompt_template.format(query=input_text)
36
- response = model(prompt)
37
- return response[0]['generated_text']
38
-
39
- # Function to save session state to a file
40
- def save_session_state():
41
- with open("session_state.json", "w") as f:
42
- json.dump(st.session_state.history, f)
43
-
44
- # Function to load session state from a file
45
- def load_session_state():
46
- if os.path.exists("session_state.json"):
47
- with open("session_state.json", "r") as f:
48
- st.session_state.history = json.load(f)
49
-
50
- # Load session state if it exists
51
- load_session_state()
52
 
53
  # Process input and display the response
54
- if st.button("Send"):
55
- if input_txt:
56
- # Select model based on user choice
57
- model = llm_8b
58
-
59
- # Get the model response
60
- response = get_response(model, input_txt)
61
-
62
- # Save the query and response in the session history
63
- st.session_state.history.append({"user": input_txt, "assistant": response})
64
-
65
- # Save session state
66
- save_session_state()
67
-
68
- # Clear conversation button
69
- if st.button("Clear Conversation"):
70
- st.session_state.history = []
71
- if os.path.exists("session_state.json"):
72
- os.remove("session_state.json")
73
-
74
- # Display conversation history
75
- for entry in st.session_state.history:
76
- st.write(f"**User:** {entry['user']}")
77
- st.write(f"**Subbu Assistant:** {entry['assistant']}")
 
1
+ # Import necessary modules
2
+ from langchain.prompts import ChatPromptTemplate # type: ignore
3
+ from langchain.llms import Ollama # type: ignore
4
+ import streamlit as st # type: ignore
 
5
 
6
  # Streamlit setup
7
  st.title("Subbu Chat Bot")
8
+ input_txt = st.text_input("Enter your queries here...")
 
 
 
 
 
 
9
 
10
  # Add a dropdown for model selection
11
+ model_choice = st.selectbox("Select the model:", ["Llama 3.2", "Llama 3.1", "Code Llama", "subbu"])
12
 
13
  # Define the prompt template
14
+ prompt = ChatPromptTemplate.from_messages(
15
+ [("system", "You are a helpful AI assistant. Your name is Subbu Assistant."),
16
+ ("user", "user query: {query}")]
 
 
 
 
 
 
 
17
  )
18
 
19
+ # Initialize each model (adjust the model names based on available models)
20
+ llm_3_2 = Ollama(model="llama3.2")
21
+ llm_3_1 = Ollama(model="llama3.1")
22
+ code_llama = Ollama(model="codellama")
23
+ subbu = Ollama(model="subbu")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
 
25
  # Process input and display the response
26
+ if input_txt:
27
+ # Select model based on user choice
28
+ if model_choice == "Llama 3.2":
29
+ response = llm_3_2(prompt.format(query=input_txt))
30
+ elif model_choice == "Llama 3.1":
31
+ response = llm_3_1(prompt.format(query=input_txt))
32
+ elif model_choice == "Code Llama":
33
+ response = code_llama(prompt.format(query=input_txt))
34
+
35
+ # Display the response
36
+ st.write(response)