File size: 11,795 Bytes
6f050d2
2f22113
 
550f98e
ac1d1c0
 
6f050d2
b64c343
 
a312592
c8ce7d7
ac1d1c0
 
 
59c2070
 
a312592
c8ce7d7
59c2070
c8ce7d7
550f98e
59c2070
862323b
2f22113
a312592
 
 
 
 
59c2070
ac1d1c0
 
a312592
ac1d1c0
59c2070
ac1d1c0
 
bcd2917
 
a312592
ac1d1c0
a312592
0861547
2232670
bcd2917
59c2070
a312592
 
 
b64c343
59c2070
a312592
 
 
 
 
 
 
59c2070
a312592
59c2070
 
 
c8ce7d7
2f22113
59c2070
ac1d1c0
59c2070
 
ac1d1c0
59c2070
a312592
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f213ab0
59c2070
 
 
f213ab0
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c351b7a
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4b7beba
 
 
59c2070
 
4b7beba
 
 
59c2070
 
e21021f
 
 
 
 
4b7beba
 
e21021f
 
 
 
59c2070
 
4b7beba
 
e21021f
 
 
 
 
 
 
 
 
 
4b7beba
59c2070
 
e21021f
 
4b7beba
e21021f
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4b7beba
59c2070
 
 
 
 
919808b
59c2070
 
 
919808b
59c2070
 
4b7beba
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bcd2917
c351b7a
59c2070
 
4b7beba
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
4b7beba
 
59c2070
 
 
 
 
 
 
bcd2917
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
59c2070
 
 
bcd2917
 
 
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bcd2917
59c2070
 
 
bcd2917
 
 
59c2070
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
f213ab0
59c2070
7108e4b
4a5989e
c8ce7d7
59c2070
c8ce7d7
59c2070
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
import json
import os
from datetime import datetime
from typing import Dict, List, Optional

import gradio as gr
import requests

# Groq API Configuration (OpenAI-compatible chat-completions endpoint)
API_URL = "https://api.groq.com/openai/v1/chat/completions"
API_KEY = os.getenv("GROQ_API_KEY")  # may be None; checked at call time, never raised here

# In-memory chat history storage (process lifetime only — lost on restart).
# Each entry is a dict with keys: "role", "content", "topic", "timestamp".
chat_history: List[Dict[str, str]] = []

def groq_with_memory(message: str, topic: str = "general") -> tuple:
    """Call the Groq chat-completions API with the stored conversation context.

    On success, both the user message and the assistant reply are appended to
    the module-level ``chat_history``. On any failure (HTTP error, malformed
    response, network exception) the optimistically-added user message is
    removed again, so failed exchanges never pollute the context sent to the
    model on later calls.

    Args:
        message: The user's message; surrounding whitespace is stripped.
        topic: Free-form tag stored with each history entry.

    Returns:
        A ``(response_text, cleared_input)`` tuple. The second element is
        always ``""`` so the UI can clear the input box after sending.
    """
    if not API_KEY:
        return "❌ No API Key found", ""

    if not message.strip():
        return "❌ Empty message", ""

    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json"
    }

    # Add the user message first so it is part of the context we send.
    chat_history.append({
        "role": "user",
        "content": message.strip(),
        "topic": topic,
        "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M")
    })

    try:
        # Send only the most recent messages to bound the request size
        # (last 50 messages for longer context).
        recent_history = chat_history[-50:]
        messages = [{"role": msg["role"], "content": msg["content"]} for msg in recent_history]

        payload = {
            "model": "gemma2-9b-it",
            "messages": messages,
            "max_tokens": 3000,  # Increased for longer responses
            "temperature": 0.7
        }

        response = requests.post(API_URL, headers=headers, json=payload, timeout=30)

        if response.status_code == 200:
            result = response.json()
            if "choices" in result and result["choices"]:
                response_content = result["choices"][0]["message"]["content"]
                chat_history.append({
                    "role": "assistant",
                    "content": response_content,
                    "topic": topic,
                    "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M")
                })
                return response_content, ""  # Clear input after send
            # Fix: roll back the unanswered user message on failure.
            chat_history.pop()
            return f"❌ No response: {result}", ""
        chat_history.pop()
        return f"❌ HTTP {response.status_code}: {response.text}", ""

    except Exception as e:
        # The append always precedes the try block, so popping here is safe.
        chat_history.pop()
        return f"❌ Error: {str(e)}", ""

def get_chat_summary(topic_filter: Optional[str] = None,
                     history: Optional[List[Dict[str, str]]] = None) -> str:
    """Generate a per-topic Markdown summary of the conversation.

    Args:
        topic_filter: If given, include only messages tagged with this topic.
        history: Message list to summarize; defaults to the module-level
            ``chat_history``. Added (backward-compatibly) for testability.

    Returns:
        A Markdown summary string, or an error string when nothing matches.
    """
    history = chat_history if history is None else history
    if not history:
        return "❌ No chat history to summarize"

    filtered = [msg for msg in history if not topic_filter or msg.get("topic") == topic_filter]

    if not filtered:
        return f"❌ No messages found for topic: {topic_filter}"

    # Group messages by topic tag (missing tag counts as "general").
    topics: Dict[str, List[Dict[str, str]]] = {}
    for msg in filtered:
        topics.setdefault(msg.get("topic", "general"), []).append(msg)

    summary = "📋 **Chat Summary**\n\n"
    for topic, messages in topics.items():
        user_msgs = [m for m in messages if m["role"] == "user"]
        ai_msgs = [m for m in messages if m["role"] == "assistant"]

        summary += f"**🏷️ Topic: {topic}**\n"
        summary += f"- Messages: {len(user_msgs)} user, {len(ai_msgs)} AI\n"
        summary += f"- Time span: {messages[0]['timestamp']} - {messages[-1]['timestamp']}\n"

        if user_msgs:
            # Fix: only append an ellipsis when content was actually truncated.
            previews = ", ".join(
                m["content"][:50] + ("..." if len(m["content"]) > 50 else "")
                for m in user_msgs[:3]
            )
            summary += f"- Key topics discussed: {previews}\n"
        summary += "\n"

    return summary

def get_full_history(topic_filter: Optional[str] = None,
                     history: Optional[List[Dict[str, str]]] = None) -> str:
    """Render the chat history as a Markdown transcript.

    Args:
        topic_filter: If given, include only messages tagged with this topic.
        history: Message list to render; defaults to the module-level
            ``chat_history``. Added (backward-compatibly) for testability.

    Returns:
        A Markdown transcript string, or an error string when nothing matches.
    """
    history = chat_history if history is None else history
    if not history:
        return "❌ No chat history available"

    filtered = [msg for msg in history if not topic_filter or msg.get("topic") == topic_filter]

    if not filtered:
        return f"❌ No messages found for topic: {topic_filter}"

    history_text = f"📚 **Chat History** ({len(filtered)} messages)\n\n"

    current_topic = None
    for msg in filtered:
        # Emit a topic header whenever the topic changes between messages.
        if msg.get("topic") != current_topic:
            current_topic = msg.get("topic")
            history_text += f"\n**🏷️ Topic: {current_topic}**\n"
            history_text += "---\n"

        role_icon = "👤" if msg["role"] == "user" else "🤖"
        history_text += f"{role_icon} **{msg['timestamp']}**\n"
        history_text += f"{msg['content']}\n\n"

    return history_text

def clear_all_history():
    """Wipe the in-memory chat history and blank both UI text areas.

    Returns:
        A status message plus two empty strings (for the AI-response and
        user-input textboxes).
    """
    # Mutating the list in place does not need a `global` declaration.
    del chat_history[:]
    return "✅ All chat history cleared", "", ""

def get_topics_list(history: Optional[List[Dict[str, str]]] = None) -> List[str]:
    """Return "All Topics" followed by the sorted unique topics in the history.

    Args:
        history: Message list to scan; defaults to the module-level
            ``chat_history``. Added (backward-compatibly) for testability.
    """
    history = chat_history if history is None else history
    # Missing topic tags count as "general", matching the rest of the app.
    topics = sorted({msg.get("topic", "general") for msg in history})
    return ["All Topics"] + topics

# Custom CSS injected into the Gradio page: full-width layout, a consistent
# Inter/system font stack, and more generous line spacing in the response,
# history, and input textareas (targeted via elem_classes set below).
custom_css = """
.gradio-container {
    max-width: 100% !important;
    padding: 0 !important;
    margin: 0 !important;
}

.main {
    max-width: 100% !important;
    padding: 10px !important;
}

/* Better font for readability */
* {
    font-family: 'Inter', 'Segoe UI', system-ui, -apple-system, sans-serif !important;
}

/* AI Response text spacing */
.response-area textarea {
    line-height: 1.7 !important;
    padding: 20px !important;
    font-size: 15px !important;
    font-family: 'Inter', 'Segoe UI', system-ui, sans-serif !important;
}

/* History text spacing */
.history-display textarea {
    line-height: 1.7 !important;
    padding: 20px !important;
    font-size: 15px !important;
    font-family: 'Inter', 'Segoe UI', system-ui, sans-serif !important;
}

/* Input text styling */
.input-area textarea {
    font-family: 'Inter', 'Segoe UI', system-ui, sans-serif !important;
    font-size: 15px !important;
    line-height: 1.6 !important;
}

/* Better text readability everywhere */
textarea, input {
    line-height: 1.6 !important;
    font-family: 'Inter', 'Segoe UI', system-ui, sans-serif !important;
}
"""

# Main Gradio Interface
with gr.Blocks(
    title="πŸ€– AI Journal Chat", 
    theme=gr.themes.Soft(),
    css=custom_css
) as demo:
    
    gr.Markdown("# πŸ“ AI Journal Chat Interface")
    gr.Markdown("*Write, chat, and keep track of your thoughts with AI assistance*")
    
    with gr.Tabs() as tabs:
        # MAIN CHAT TAB
        with gr.Tab("πŸ’¬ Chat"):
            # AI Response Area (Top)
            ai_response = gr.Textbox(
                label="πŸ€– AI Response",
                lines=12,
                max_lines=20,
                interactive=False,
                placeholder="AI responses will appear here...",
                show_copy_button=True,
                elem_classes="response-area"
            )
            
            # Input Area (Bottom)
            with gr.Group():
                with gr.Row():
                    user_input = gr.Textbox(
                        label="✍️ Your Message",
                        placeholder="Type your thoughts, questions, or journal entry here...",
                        lines=4,
                        max_lines=10,
                        scale=3
                    )
                    with gr.Column(scale=1):
                        topic_input = gr.Textbox(
                            label="🏷️ Topic",
                            value="journal",
                            placeholder="e.g., work, personal, ideas"
                        )
                        send_btn = gr.Button("πŸ“€ Send", variant="primary", size="lg")
                
                with gr.Row():
                    clear_response_btn = gr.Button("πŸ—‘οΈ Clear Response", variant="secondary")
                    show_context_btn = gr.Button("πŸ“‹ Show Current Context", variant="secondary")
        
        # HISTORY TAB
        with gr.Tab("πŸ“š Chat History"):
            with gr.Group():
                with gr.Row():
                    topic_filter = gr.Dropdown(
                        label="πŸ” Filter by Topic",
                        choices=["All Topics"],
                        value="All Topics",
                        interactive=True
                    )
                    refresh_topics_btn = gr.Button("πŸ”„ Refresh Topics", variant="secondary")
                
                with gr.Row():
                    show_history_btn = gr.Button("πŸ“– Show Full History", variant="primary")
                    show_summary_btn = gr.Button("πŸ“‹ Show Summary", variant="secondary")
                    clear_history_btn = gr.Button("πŸ—‘οΈ Clear All History", variant="stop")
                
                history_display = gr.Textbox(
                    label="πŸ“š History & Summary",
                    lines=20,
                    max_lines=30,
                    interactive=False,
                    show_copy_button=True,
                    placeholder="Chat history and summaries will appear here...",
                    elem_classes="history-display"
                )
    
    # Event Handlers
    def send_message(message, topic):
        response, cleared_input = groq_with_memory(message, topic)
        return response, cleared_input
    
    def show_current_context():
        """Show current conversation context that AI can see"""
        if not chat_history:
            return "❌ No conversation context yet"
        
        recent_history = chat_history[-50:]  # Same as what AI sees
        context_text = f"🧠 **Current AI Context** ({len(recent_history)} messages)\n\n"
        
        for msg in recent_history:
            role_icon = "πŸ‘€" if msg["role"] == "user" else "πŸ€–"
            context_text += f"{role_icon} **{msg['timestamp']}** [{msg.get('topic', 'general')}]\n"
            context_text += f"{msg['content'][:100]}{'...' if len(msg['content']) > 100 else ''}\n\n"
        
        context_text += f"\nπŸ’‘ *AI can remember these {len(recent_history)} messages in current conversation*"
        return context_text
    
    def refresh_topic_choices():
        return gr.Dropdown(choices=get_topics_list())
    
    def clear_only_response():
        return ""
    
    def filter_and_show_history(topic_filter):
        filter_topic = None if topic_filter == "All Topics" else topic_filter
        return get_full_history(filter_topic)
    
    def filter_and_show_summary(topic_filter):
        filter_topic = None if topic_filter == "All Topics" else topic_filter
        return get_chat_summary(filter_topic)
    
    # Button Events
    send_btn.click(
        send_message,
        inputs=[user_input, topic_input],
        outputs=[ai_response, user_input]
    )
    
    user_input.submit(
        send_message,
        inputs=[user_input, topic_input],
        outputs=[ai_response, user_input]
    )
    
    clear_response_btn.click(
        clear_only_response,
        outputs=[ai_response]
    )
    
    show_context_btn.click(
        show_current_context,
        outputs=[ai_response]
    )
    
    refresh_topics_btn.click(
        refresh_topic_choices,
        outputs=[topic_filter]
    )
    
    show_history_btn.click(
        filter_and_show_history,
        inputs=[topic_filter],
        outputs=[history_display]
    )
    
    show_summary_btn.click(
        filter_and_show_summary,
        inputs=[topic_filter],
        outputs=[history_display]
    )
    
    clear_history_btn.click(
        clear_all_history,
        outputs=[ai_response, user_input, history_display]
    )

# Launch configuration — startup banner runs on import; the server itself
# only starts when the file is executed directly.
print("🚀 Starting AI Journal Chat Interface...")
print("🌐 Access at: http://localhost:7860")  # fixed: f-string had no placeholders (F541)
print(f"🔑 API Key: {'✅ Found' if API_KEY else '❌ Missing'}")

if __name__ == "__main__":
    # 0.0.0.0 makes the app reachable from other hosts/containers on port 7860.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,
        share=False
    )