File size: 7,301 Bytes
cf44ed6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5492df5
cf44ed6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8748dfd
cf44ed6
 
 
8748dfd
cf44ed6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8748dfd
cf44ed6
8748dfd
cf44ed6
 
 
 
5492df5
8748dfd
5492df5
8748dfd
 
 
 
5492df5
 
cf44ed6
 
 
 
 
 
 
8748dfd
cf44ed6
 
 
 
5492df5
 
8748dfd
5492df5
 
 
 
 
cf44ed6
5492df5
cf44ed6
5492df5
 
cf44ed6
 
5492df5
 
cf44ed6
 
5492df5
cf44ed6
 
5492df5
cf44ed6
5492df5
 
 
cf44ed6
5492df5
cf44ed6
5492df5
cf44ed6
 
 
 
 
 
 
 
 
 
 
5492df5
cf44ed6
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
import gradio as gr
import os
import json
from dotenv import load_dotenv
from langchain_anthropic import ChatAnthropic

load_dotenv()

class LegionMariaAssistant:
    """Two-tier chatbot for the Legion Maria Directorate of Youth Affairs.

    A lightweight "router" LLM classifies each query into one section of a
    JSON knowledge base; a second "response" LLM then answers in persona
    using only that section's data.
    """

    # Path to the structured knowledge base (relative to the working dir).
    DATA_FILE = "./data.json"

    def __init__(self):
        # Router LLM - lightweight, low temperature for deterministic
        # section classification.
        self.router_llm = ChatAnthropic(
            anthropic_api_key=os.getenv("ANTHROPIC_API_KEY"),
            model="claude-3-5-haiku-20241022",
            temperature=0.1
        )

        # Response LLM - generates the final user-facing answer.
        self.response_llm = ChatAnthropic(
            anthropic_api_key=os.getenv("ANTHROPIC_API_KEY"),
            model="claude-3-5-haiku-20241022",
            temperature=0.1  # Lower temperature for more consistent, direct responses
        )

        # Section name -> section payload, populated by load_data().
        self.data_content = {}
        self.load_data()

    def load_data(self):
        """Load the structured JSON knowledge base into ``self.data_content``.

        On any failure (missing file, unreadable file, malformed JSON) the
        content is reset to an empty dict so the assistant degrades
        gracefully instead of crashing at startup.
        """
        try:
            # EAFP: attempt the read and handle the specific failures,
            # instead of racing an os.path.exists() check.
            with open(self.DATA_FILE, 'r', encoding='utf-8') as f:
                self.data_content = json.load(f)
        except FileNotFoundError:
            print("data.json not found")
            self.data_content = {}
        except (OSError, json.JSONDecodeError) as e:
            print(f"Error loading data: {str(e)}")
            self.data_content = {}
        else:
            print("Loaded Legion Maria JSON data successfully")
            print(f"Available sections: {list(self.data_content.keys())}")

    def route_query(self, message):
        """Ask the router LLM which data section best matches *message*.

        Returns a key of ``self.data_content``, the literal string
        ``"general"`` for cross-section queries, or ``"about"`` as a safe
        fallback on errors or unrecognized answers.
        """
        available_sections = list(self.data_content.keys())

        router_prompt = f"""You are a query router for the Legion Maria Youth Affairs system. 
        
Available data sections: {available_sections}

Each section contains:
- about: mission, vision, core values, organizational information
- office: projects, community outreach, operational details
- leadership: organizational structure, leadership team, roles

User query: "{message}"

Respond with ONLY the most relevant section name from the available sections. If the query spans multiple sections or is general, respond with "general".

Examples:
- "Who is the director?" -> leadership
- "What is your mission?" -> about
- "Tell me about your projects" -> office
- "What do you do?" -> general

Section:"""

        try:
            response = self.router_llm.invoke([{"role": "user", "content": router_prompt}])
            # Normalize: models sometimes wrap the answer in quotes,
            # backticks, or trailing punctuation; strip those before
            # validating so a correct routing isn't discarded.
            section = response.content.strip().lower().strip(' \'"`.')

            # Validate section exists
            if section in available_sections:
                return section
            if section == "general":
                return "general"
            return "about"  # Default fallback

        except Exception as e:  # network/API boundary: log and fall back
            print(f"Router error: {str(e)}")
            return "about"  # Default fallback

    def chat_response(self, message, history):
        """Two-tier LLM system: route the query, then answer in persona.

        ``history`` is a list of ``[user_msg, assistant_msg]`` pairs as
        produced by the Gradio chatbot; ``user_msg`` may be None for the
        bot-only initial greeting turn.
        Returns the assistant's reply as a string.
        """
        if not message.strip():
            return "Please ask me something about our Legion Maria Youth Affairs!"

        try:
            if not self.data_content:
                return "I don't have that information available right now."

            # Step 1: Router LLM decides which section to use
            selected_section = self.route_query(message)
            print(f"Router selected section: {selected_section}")

            # Step 2: Get relevant data based on routing decision
            if selected_section == "general":
                # Use all data for general queries
                relevant_data = self.data_content
            else:
                # Use only the specific section
                relevant_data = {selected_section: self.data_content.get(selected_section, {})}

            # Build conversation context
            conversation_context = ""
            if history:
                conversation_context = "Previous conversation:\n"
                for user_msg, assistant_msg in history[-3:]:  # Keep last 3 exchanges
                    # The initial greeting turn has no user message; skip the
                    # "User:" line rather than rendering "User: None".
                    if user_msg is not None:
                        conversation_context += f"User: {user_msg}\n"
                    conversation_context += f"Assistant: {assistant_msg}\n\n"
                conversation_context += "Current conversation:\n"

            # Step 3: Response LLM generates answer using only relevant data.
            # ensure_ascii=False keeps any non-ASCII knowledge-base text
            # readable in the prompt instead of \uXXXX escapes.
            response_prompt = f"""You are Santa Legion from the Legion Maria Directorate of Youth Affairs. Speak in first person as a member of the organization.

Your Knowledge:
{json.dumps(relevant_data, indent=2, ensure_ascii=False)}

{conversation_context}User: {message}

Guidelines:
- You are Santa Legion, speak as "I" and "we" (the organization)
- Keep responses SHORT (1-3 sentences maximum)
- Be direct and personal
- Never mention being provided documents or data
- Speak as if this is your natural knowledge
- Use "our mission", "we believe", "I can help you with"

Answer:"""

            # Get response from specialist LLM
            response = self.response_llm.invoke([{"role": "user", "content": response_prompt}])
            return response.content

        except Exception as e:  # last-resort boundary: never crash the UI
            print(f"Error generating response: {str(e)}")
            return "I'm sorry, I'm having trouble right now. Please try again."

def main():
    """Build and launch the mobile-optimized Gradio chat interface."""
    assistant = LegionMariaAssistant()

    # Initial greeting message (None user turn renders as a bot-only bubble)
    initial_greeting = [
        [None, "πŸ‘‹ Hello! I'm Santa Legion from the Legion Maria Youth Affairs. I'm here to help you learn about our mission, leadership, projects, and activities. What would you like to know?"]
    ]

    def fresh_greeting():
        # Return a copy so the live chat history (which is appended to in
        # respond()) can never mutate the canonical greeting list.
        return [row[:] for row in initial_greeting]

    # Create mobile-optimized Gradio chat interface
    with gr.Blocks(title="Legion Maria Chat", theme=gr.themes.Soft()) as demo:
        gr.Markdown("# πŸ’¬ Legion Maria YA")

        # Mobile-optimized chat interface
        chatbot = gr.Chatbot(
            value=fresh_greeting(),
            height=400,  # Reduced for mobile
            show_label=False,
            container=True,
            bubble_full_width=True,  # Better for mobile
            show_share_button=False
        )

        # Mobile-friendly input layout
        with gr.Row():
            msg = gr.Textbox(
                placeholder="Ask me anything...",
                show_label=False,
                scale=5,
                container=False,
                lines=1
            )
            send_btn = gr.Button("πŸ“€", variant="primary", scale=1, size="sm")

        clear = gr.Button("πŸ”„ New Chat", variant="secondary", size="sm")

        def respond(message, history):
            # Append the [user, bot] pair and clear the textbox; ignore
            # blank submissions.
            if message.strip():
                bot_response = assistant.chat_response(message, history)
                history.append([message, bot_response])
            return history, ""

        # Event handlers (Enter key and send button do the same thing)
        msg.submit(respond, [msg, chatbot], [chatbot, msg])
        send_btn.click(respond, [msg, chatbot], [chatbot, msg])
        # Reset to a *fresh* greeting copy, not the shared list object.
        clear.click(fresh_greeting, None, chatbot)

    # Launch with better settings for chat app
    demo.launch(
        share=False,
        server_name="0.0.0.0",
        server_port=7860,
        show_api=False
    )

# Launch the app only when executed as a script (not when imported).
if __name__ == "__main__":
    main()