arun47 committed on
Commit
21b20f0
·
verified ·
1 Parent(s): 6e733e0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -29
app.py CHANGED
@@ -25,7 +25,11 @@ prompt = PromptTemplate(
25
  template=template
26
  )
27
 
28
- # LLM with API Key
 
 
 
 
29
  llm = ChatOpenAI(
30
  temperature=0.5,
31
  model="gpt-4o-mini",
@@ -33,47 +37,32 @@ llm = ChatOpenAI(
33
  )
34
 
35
 
36
- # --- Core Function ---
37
- def get_text_response(user_message, history, memory_state):
38
- if memory_state is None:
39
- memory_state = ConversationBufferMemory(memory_key="chat_history")
40
 
41
- llm_chain = LLMChain(
42
  llm=llm,
43
  prompt=prompt,
44
- verbose=True,
45
- memory=memory_state
46
  )
47
 
48
- response = llm_chain.predict(user_message=user_message)
49
  history = history + [[user_message, response]]
50
- return history, memory_state
51
 
52
 
53
  # --- Gradio UI ---
54
  with gr.Blocks() as demo:
55
- chatbot = gr.Chatbot()
56
  msg = gr.Textbox(placeholder="Type your message...", show_label=False)
57
- send_btn = gr.Button("Send")
58
  memory_state = gr.State()
59
 
60
- def respond(message, history, memory_state):
61
- history, memory_state = get_text_response(message, history, memory_state)
62
- return history, memory_state, gr.Textbox.update(value="") # clear input box
63
-
64
- # Enter key submit
65
- msg.submit(
66
- respond,
67
- [msg, chatbot, memory_state],
68
- [chatbot, memory_state, msg]
69
- )
70
-
71
- # Send button click
72
- send_btn.click(
73
- respond,
74
- [msg, chatbot, memory_state],
75
- [chatbot, memory_state, msg]
76
- )
77
 
78
  if __name__ == "__main__":
79
  demo.launch(share=True)
 
25
  template=template
26
  )
27
 
28
# Memory
def new_memory():
    """Build a fresh per-session conversation memory.

    The ``memory_key`` must match the ``{chat_history}`` placeholder the
    prompt template reads from, so the chain injects prior turns there.
    """
    session_memory = ConversationBufferMemory(memory_key="chat_history")
    return session_memory
31
+
32
+ # LLM
33
  llm = ChatOpenAI(
34
  temperature=0.5,
35
  model="gpt-4o-mini",
 
37
  )
38
 
39
 
40
# --- Core Chat Function ---
def respond(user_message, history, memory_state):
    """Generate a reply for *user_message* and append the turn to history.

    Parameters
    ----------
    user_message : str
        Text the user typed into the input box.
    history : list[list[str]] | None
        Gradio chatbot history as ``[user, bot]`` pairs. May be ``None``
        on the very first turn (gr.Chatbot's default value), so it is
        treated as an empty list.
    memory_state : ConversationBufferMemory | None
        Per-session LangChain memory, created lazily on first use so each
        browser session gets its own conversation buffer.

    Returns
    -------
    tuple
        ``(updated_history, memory_state, "")`` — the trailing empty
        string is routed back to the textbox to clear it.
    """
    if memory_state is None:  # create new memory for each session
        memory_state = new_memory()

    # Rebuilding the chain per call is cheap; all conversational state
    # lives in memory_state, not in the chain object itself.
    chain = LLMChain(
        llm=llm,
        prompt=prompt,
        memory=memory_state,
        verbose=False,
    )

    response = chain.predict(user_message=user_message)
    # Guard against None history (initial Chatbot value) before appending.
    history = (history or []) + [[user_message, response]]
    return history, memory_state, ""  # last "" clears textbox
55
 
56
 
57
# --- Gradio UI ---
with gr.Blocks() as demo:
    chatbot = gr.Chatbot(label="Arun AI Assistant")
    msg = gr.Textbox(placeholder="Type your message...", show_label=False)
    send = gr.Button("Send")
    memory_state = gr.State()

    # Enter in the textbox and the Send button trigger the same handler.
    # The handler's third output ("") is wired back to msg to clear it.
    chat_inputs = [msg, chatbot, memory_state]
    chat_outputs = [chatbot, memory_state, msg]
    msg.submit(respond, chat_inputs, chat_outputs)
    send.click(respond, chat_inputs, chat_outputs)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Entry point: share=True asks Gradio for a public tunnel URL in
# addition to the local server.
if __name__ == "__main__":
    demo.launch(share=True)