sahanacp committed on
Commit
543acf2
·
verified ·
1 Parent(s): 3195421

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +96 -96
app.py CHANGED
@@ -1,97 +1,97 @@
1
- import gradio as gr
2
- from langchain_community.chat_models import ChatOpenAI
3
- from langchain.memory import ConversationBufferMemory, SimpleMemory
4
- from langchain.agents import initialize_agent, AgentType
5
- from dotenv import load_dotenv
6
- import os
7
- import agent.planning_agent as planning_agent
8
- import logging
9
-
10
- # Configure logging
11
- logging.basicConfig(level=logging.INFO)
12
- logger = logging.getLogger(__name__)
13
-
14
- # Global variables
15
- llm = None
16
- chat_memory = None
17
- query_memory = None
18
-
19
- def initialize_components():
20
- global llm, chat_memory, query_memory
21
- load_dotenv()
22
- api_key = os.environ['OA_API']
23
- os.environ['OPENAI_API_KEY'] = api_key
24
-
25
- llm = ChatOpenAI(
26
- model_name="gpt-3.5-turbo",
27
- temperature=0,
28
- api_key=api_key
29
- )
30
-
31
- # Initialize memories
32
- chat_memory = ConversationBufferMemory(
33
- memory_key="chat_history",
34
- return_messages=True
35
- )
36
- query_memory = SimpleMemory()
37
-
38
- # Initialize planning agent with both memories
39
- planning_agent.initialize_planning_agent(llm, chat_memory, query_memory)
40
-
41
- logger.info("Components initialized successfully")
42
-
43
- def process_query(query, history):
44
- try:
45
- # Restore chat history from Gradio's history
46
- if history:
47
- for human_msg, ai_msg in history:
48
- if chat_memory and hasattr(chat_memory, 'chat_memory'):
49
- chat_memory.chat_memory.add_user_message(human_msg)
50
- chat_memory.chat_memory.add_ai_message(ai_msg)
51
-
52
- # Store original query in query memory
53
- query_memory.memories['original_query'] = query
54
-
55
- # Execute query through planning agent
56
- response = planning_agent.execute(query)
57
-
58
- # Add current interaction to chat memory
59
- if chat_memory and hasattr(chat_memory, 'chat_memory'):
60
- chat_memory.chat_memory.add_user_message(query)
61
- chat_memory.chat_memory.add_ai_message(response)
62
-
63
- return response
64
-
65
- except Exception as e:
66
- error_msg = f"Error processing query: {str(e)}"
67
- logger.error(f"Error details: {str(e)}")
68
-
69
- if chat_memory and hasattr(chat_memory, 'chat_memory'):
70
- chat_memory.chat_memory.add_user_message(query)
71
- chat_memory.chat_memory.add_ai_message(error_msg)
72
-
73
- return error_msg
74
-
75
- def clear_context():
76
- planning_agent.clear_context()
77
- chat_memory.clear()
78
- query_memory.memories.clear()
79
- return [], []
80
-
81
- def create_gradio_app():
82
- from interface import create_interface
83
- return create_interface(process_query, clear_context)
84
-
85
- def main():
86
- """Main application entry point"""
87
- try:
88
- initialize_components()
89
- app = create_gradio_app()
90
- app.queue()
91
- app.launch(server_name="0.0.0.0", server_port=7860, share=True)
92
- except Exception as e:
93
- logger.error(f"Error in main: {str(e)}")
94
- raise
95
-
96
- if __name__ == "__main__":
97
  main()
 
1
+ import gradio as gr
2
+ from langchain_community.chat_models import ChatOpenAI
3
+ from langchain.memory import ConversationBufferMemory, SimpleMemory
4
+ from langchain.agents import initialize_agent, AgentType
5
+ from dotenv import load_dotenv
6
+ import os
7
+ import agent.planning_agent as planning_agent
8
+ import logging
9
+
10
+ # Configure logging
11
+ logging.basicConfig(level=logging.INFO)
12
+ logger = logging.getLogger(__name__)
13
+
14
+ # Global variables
15
+ llm = None
16
+ chat_memory = None
17
+ query_memory = None
18
+
19
+ def initialize_components():
20
+ global llm, chat_memory, query_memory
21
+ load_dotenv()
22
+ api_key = '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'
23
+ os.environ['OPENAI_API_KEY'] = api_key
24
+
25
+ llm = ChatOpenAI(
26
+ model_name="gpt-3.5-turbo",
27
+ temperature=0,
28
+ api_key=api_key
29
+ )
30
+
31
+ # Initialize memories
32
+ chat_memory = ConversationBufferMemory(
33
+ memory_key="chat_history",
34
+ return_messages=True
35
+ )
36
+ query_memory = SimpleMemory()
37
+
38
+ # Initialize planning agent with both memories
39
+ planning_agent.initialize_planning_agent(llm, chat_memory, query_memory)
40
+
41
+ logger.info("Components initialized successfully")
42
+
43
def process_query(query, history):
    """Run *query* through the planning agent, mirroring Gradio history.

    Args:
        query: the user's latest message.
        history: Gradio-style list of (user, assistant) message pairs.

    Returns:
        The agent's response string, or an error message on failure.
    """
    try:
        # Replay Gradio's (user, assistant) pairs into the LangChain buffer
        # so the agent sees the full conversation.
        if history:
            for user_turn, bot_turn in history:
                if chat_memory and hasattr(chat_memory, 'chat_memory'):
                    chat_memory.chat_memory.add_user_message(user_turn)
                    chat_memory.chat_memory.add_ai_message(bot_turn)

        # Keep the untouched query available to downstream tools.
        query_memory.memories['original_query'] = query

        # Delegate the actual work to the planning agent.
        answer = planning_agent.execute(query)

        # Record the new turn in chat memory.
        if chat_memory and hasattr(chat_memory, 'chat_memory'):
            chat_memory.chat_memory.add_user_message(query)
            chat_memory.chat_memory.add_ai_message(answer)

        return answer

    except Exception as exc:
        failure = f"Error processing query: {str(exc)}"
        logger.error(f"Error details: {str(exc)}")

        # Even failed turns are recorded so the transcript stays complete.
        if chat_memory and hasattr(chat_memory, 'chat_memory'):
            chat_memory.chat_memory.add_user_message(query)
            chat_memory.chat_memory.add_ai_message(failure)

        return failure
74
+
75
def clear_context():
    """Wipe agent, chat, and query state; return fresh chatbot/state lists."""
    planning_agent.clear_context()
    chat_memory.clear()
    query_memory.memories.clear()
    # Gradio expects (chatbot, state) — both reset to empty.
    return ([], [])
80
+
81
def create_gradio_app():
    """Build the Gradio UI, wiring in the query and clear callbacks."""
    # Imported lazily so UI construction stays decoupled from startup.
    from interface import create_interface
    app = create_interface(process_query, clear_context)
    return app
84
+
85
def main():
    """Main application entry point"""
    try:
        initialize_components()
        demo = create_gradio_app()
        demo.queue()
        # Bind on all interfaces for containerized hosting (HF Spaces).
        demo.launch(server_name="0.0.0.0", server_port=7860, share=True)
    except Exception as exc:
        logger.error(f"Error in main: {str(exc)}")
        raise
95
+
96
# Script entry point.
if __name__ == "__main__":
    main()