File size: 3,268 Bytes
e0d70b9
3cd2906
e0d70b9
 
 
3cd2906
e0d70b9
 
 
 
 
 
3cd2906
e0d70b9
 
 
 
 
3cd2906
e0d70b9
 
3cd2906
e0d70b9
 
 
 
 
3cd2906
e0d70b9
 
 
 
 
 
 
 
3cd2906
e0d70b9
 
 
 
 
 
 
 
3cd2906
e0d70b9
3cd2906
e0d70b9
 
 
 
3cd2906
e0d70b9
 
 
 
 
 
3cd2906
e0d70b9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3cd2906
 
e0d70b9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3cd2906
e0d70b9
 
 
3cd2906
e0d70b9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
import os
import gradio as gr
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate
from langchain_tavily import TavilySearch

# =====================================================
# SYSTEM PROMPT
# =====================================================
# Persona/instruction text injected as the "system" message of every
# request (see the ChatPromptTemplate below).
ai_subhash = """
You are Subhash AI, a smart and friendly AI Mentor for engineering students.
Explain concepts clearly and step by step in simple words.

Always:
- Use simple language
- Give step-by-step explanations
- Use examples or analogies
- Be friendly, patient, and motivating

After every answer, ask one small follow-up question.
"""

# =====================================================
# LOAD SECRETS (HUGGING FACE WAY)
# =====================================================
# Keys come from environment variables (Hugging Face Spaces "secrets").
# NOTE(review): os.getenv returns None when a variable is unset, so a
# missing key fails later at request time, not here — confirm both
# secrets are configured in the Space settings.
GROQ_API_KEY = os.getenv("GROQ_API_KEY")
TAVILY_API_KEY = os.getenv("TAVILY_API_KEY")

# =====================================================
# MODEL + SEARCH
# =====================================================
# Groq-hosted chat model; temperature=0 for deterministic, factual answers.
llm = ChatGroq(
    model_name="openai/gpt-oss-120b",
    temperature=0,
    groq_api_key=GROQ_API_KEY
)

# Prompt template: system persona followed by one human message that packs
# the flattened chat transcript, web-search context, and the new question.
prompt = ChatPromptTemplate.from_messages([
    ("system", ai_subhash),
    ("human",
     "Chat history:\n{chat_history}\n\n"
     "Context:\n{context}\n\n"
     "User: {user_input}\n\n"
     "AI:")
])

# LCEL pipeline: render the prompt, then call the model.
chain = prompt | llm

# Web-search tool used by predict() to fetch fresh context.
# BUG FIX: the keyword is `max_results` (plural); the previous
# `max_result=5` did not set the result limit as intended.
search = TavilySearch(
    max_results=5,
    tavily_api_key=TAVILY_API_KEY
)

# =====================================================
# CHAT LOGIC
# =====================================================
def predict(message, history):
    """Answer *message* using web-search context plus the prior chat turns.

    Args:
        message: Latest user input (str).
        history: List of (user, ai) message pairs from the Gradio chatbot.

    Returns:
        The model's reply text, or "" when the input is blank.
    """
    if not message.strip():
        return ""

    # Flatten prior turns into a plain-text transcript for the prompt.
    chat_history = "".join(f"User: {u}\nAI: {a}\n" for u, a in history)

    # Web search for up-to-date context.
    # BUG FIX: TavilySearch.invoke returns its hits under the "results"
    # key (plural); reading "result" always produced an empty context,
    # so the search output never reached the model.
    results = search.invoke(message)
    context = "\n".join(
        r.get("content", "") for r in results.get("results", [])
    )

    response = chain.invoke({
        "user_input": message,
        "context": context,
        "chat_history": chat_history
    })

    return response.content

# =====================================================
# UI STYLING
# =====================================================
# CSS injected into the Gradio page: dark radial page background and a
# translucent, rounded chat panel.
custom_css = """
body {
    background: radial-gradient(circle at top, #020617, #000000);
}
.gr-chatbot {
    background: rgba(2, 6, 23, 0.75);
    border-radius: 18px;
}
"""

# =====================================================
# GRADIO UI
# =====================================================
with gr.Blocks(css=custom_css, theme=gr.themes.Soft()) as demo:

    gr.Markdown("# 🤖 Subhash Chatbot")
    gr.Markdown(
        "<div style='text-align:center;color:#a5b4fc'>AI mentor with memory + web search</div>"
    )

    chatbot = gr.Chatbot(height=420)
    msg = gr.Textbox(
        placeholder="Ask your question...",
        show_label=False
    )

    clear = gr.Button("🧹 Clear Chat")

    def respond(message, chat_history):
        """Handle one chat turn: clear the textbox and append the exchange.

        BUG FIX: blank input used to append an empty ("", "") pair to the
        history (predict returns "" but the pair was still appended);
        blank submissions are now ignored.
        """
        if not message.strip():
            return "", chat_history
        reply = predict(message, chat_history)
        chat_history.append((message, reply))
        return "", chat_history

    # Submit on Enter; clearing replaces the chatbot value with an empty list.
    msg.submit(respond, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: [], None, chatbot)

    gr.Markdown(
        "<footer style='text-align:center;color:#94a3b8'>Built by Subhash • Powered by Groq + Tavily</footer>"
    )

# Launch only when run as a script so importing this module (e.g. for
# tests or tooling) does not start the web server.
if __name__ == "__main__":
    demo.launch()