arun47 commited on
Commit
a9fbd1f
·
verified ·
1 Parent(s): ca1fc23

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +18 -20
app.py CHANGED
@@ -24,6 +24,9 @@ prompt = PromptTemplate(
24
  template=template
25
  )
26
 
 
 
 
27
  # LLM with API Key
28
  llm = ChatOpenAI(
29
  temperature=0.5,
@@ -31,32 +34,27 @@ llm = ChatOpenAI(
31
  api_key=openai_api_key
32
  )
33
 
34
- # Chat Function (3 args required!)
35
- def get_text_response(user_message, history, state):
36
- # create memory if none exists for this session
37
- if state is None:
38
- state = ConversationBufferMemory(memory_key="chat_history")
39
-
40
- # Chain with per-user memory
41
- llm_chain = LLMChain(
42
- llm=llm,
43
- prompt=prompt,
44
- verbose=True,
45
- memory=state
46
- )
47
 
 
 
48
  response = llm_chain.predict(user_message=user_message)
49
- return response, state
50
 
51
- # Gradio Chat App
52
  demo = gr.ChatInterface(
53
  fn=get_text_response,
54
  examples=[
55
- ["How are you doing?", None],
56
- ["What are your interests?", None],
57
- ["Which places do you like to visit?", None]
58
- ],
59
- additional_inputs=[gr.State()]
60
  )
61
 
62
  if __name__ == "__main__":
 
24
  template=template
25
  )
26
 
27
# Process-wide conversation memory, shared by all requests; the running
# chat transcript is stored under the "chat_history" prompt variable.
memory = ConversationBufferMemory(memory_key="chat_history")
29
+
30
  # LLM with API Key
31
  llm = ChatOpenAI(
32
  temperature=0.5,
 
34
  api_key=openai_api_key
35
  )
36
 
37
# Conversation chain: wires the prompt template, the chat model, and the
# shared memory together; verbose=True echoes each rendered prompt to stdout.
llm_chain = LLMChain(llm=llm, prompt=prompt, verbose=True, memory=memory)
 
 
 
 
 
 
44
 
45
def get_text_response(user_message, history):
    """Return the model's reply to *user_message*.

    `history` is supplied by gr.ChatInterface but is deliberately unused:
    conversation state is tracked by the ConversationBufferMemory attached
    to the module-level `llm_chain`.
    """
    reply = llm_chain.predict(user_message=user_message)
    return reply
49
 
50
# Gradio chat UI. Plain-string examples are the correct shape here because
# get_text_response takes no additional inputs beyond (message, history).
example_prompts = [
    "How are you doing?",
    "What are your interests?",
    "Which places do you like to visit?",
]
demo = gr.ChatInterface(fn=get_text_response, examples=example_prompts)
59
 
60
  if __name__ == "__main__":