Samagra07 commited on
Commit
a4b6367
·
verified ·
1 Parent(s): 1b38f45

Upload 2 files

Browse files
Files changed (2) hide show
  1. app.py +71 -0
  2. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ from langchain.chains.llm import LLMChain
3
+ from langchain_core.prompts import (
4
+ ChatPromptTemplate,
5
+ HumanMessagePromptTemplate,
6
+ MessagesPlaceholder,
7
+ )
8
+ from langchain_core.messages import SystemMessage
9
+ from langchain.chains.conversation.memory import ConversationBufferWindowMemory
10
+ from langchain_groq import ChatGroq
11
+ from dotenv import load_dotenv
12
+ import os
13
+ load_dotenv()
14
+
15
def main():
    """Streamlit chat UI backed by a Groq-hosted LLM via LangChain.

    Reads GROQ_API_KEY from the environment (loaded from .env at module
    import), lets the user pick a model and a sliding conversational-memory
    window in the sidebar, and persists the conversation across Streamlit
    reruns in ``st.session_state.chat_history``.
    """
    groq_api_key = os.getenv("GROQ_API_KEY")
    st.title("Chat with Groq!")
    st.write("Hello! I'm your friendly Groq chatbot. I can help answer your questions, provide information, or just chat. I'm also super fast! Let's start our conversation!")

    # Fail fast with a readable message instead of letting ChatGroq raise
    # an opaque authentication error on the first question.
    if not groq_api_key:
        st.error("GROQ_API_KEY is not set. Add it to your environment or .env file.")
        return

    st.sidebar.title('Customization')
    system_prompt = st.sidebar.text_input("System prompt:")
    model = st.sidebar.selectbox(
        'Choose a model',
        ['llama3-8b-8192', 'mixtral-8x7b-32768', 'gemma-7b-it']
    )
    conversational_memory_length = st.sidebar.slider('Conversational memory length:', 1, 10, value = 5)

    # A fresh memory object is built on every rerun and replayed from
    # session_state below, so the window size (k) always reflects the
    # current slider value.
    memory = ConversationBufferWindowMemory(k=conversational_memory_length, memory_key="chat_history", return_messages=True)

    user_question = st.chat_input("Ask a question:")
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history=[]
    else:
        # Replay prior turns into the fresh memory so the chain sees the
        # last k exchanges as chat_history.
        for message in st.session_state.chat_history:
            memory.save_context(
                {'input':message['human']},
                {'output':message['AI']}
            )

    if user_question:
        # Build the model client and chain only when there is a question to
        # answer — sidebar-only reruns skip this entirely.
        groq_chat = ChatGroq(
            groq_api_key=groq_api_key,
            model_name=model
        )
        prompt = ChatPromptTemplate.from_messages(
            [
                # NOTE(review): system_prompt may be "" when the sidebar box
                # is left empty — confirm the target models accept an empty
                # system message.
                SystemMessage(
                    content=system_prompt
                ),
                MessagesPlaceholder(
                    variable_name="chat_history"
                ),
                HumanMessagePromptTemplate.from_template(
                    "{human_input}"
                ),
            ]
        )
        conversation = LLMChain(
            llm=groq_chat,
            prompt=prompt,
            verbose=True,
            memory=memory,
        )
        response = conversation.predict(human_input=user_question)
        message = {"human":user_question,"AI": response}
        st.session_state.chat_history.append(message)
        st.write("Chatbot:", response)
69
+
70
# Run the app only when this file is executed directly (e.g. via
# `streamlit run app.py`), not when it is imported as a module.
if __name__ == "__main__":
    main()
requirements.txt ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ langchain
2
+ langchain_community
3
+ langchain_core
4
+ langchain_groq
5
+ python-dotenv
6
+ streamlit