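"""AI Mentor: a Streamlit chat app that answers queries with Gemini (via
LangChain) and can export the conversation as a PDF built with FPDF."""
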
import os
from io import BytesIO

import streamlit as st
from fpdf import FPDF
from langchain.chains.llm import LLMChain
from langchain.prompts import PromptTemplate
from langchain_google_genai import ChatGoogleGenerativeAI

# langchain_google_genai reads GOOGLE_API_KEY; mirror it from G_API, and guard
# against a missing key so `os.environ[...] = None` cannot raise a TypeError.
api_key = os.getenv("G_API")
if not api_key:
    st.error("Set the G_API environment variable with your Google API key.")
    st.stop()
os.environ["GOOGLE_API_KEY"] = api_key

# temperature=0.0 keeps the mentor's answers as deterministic as the API allows.
model = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.0)

prompt = PromptTemplate.from_template(
    "You are an expert and motivational AI Mentor. Provide detailed, "
    "thoughtful, and practical guidance in response to the following query. "
    "Avoid unnecessary fluff or emojis.\n\n{input}"
)

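# LLMChain and .run() still work but are deprecated in recent LangChain
# releases; `prompt | model` with .invoke() is the modern (LCEL) equivalent.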
chain = LLMChain(llm=model, prompt=prompt)


def ai_mentor(prompt_input: str) -> str:
    """Send one query through the chain and return the model's reply."""
    return chain.run(input=prompt_input)


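# Build the PDF in memory so st.download_button can serve it without
# touching disk.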
def create_pdf_buffer(messages) -> BytesIO:
    """Render the chat history into an in-memory PDF."""
    buffer = BytesIO()
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Helvetica", size=14)
    pdf.cell(200, 20, "Chat History with AI Mentor", ln=1, align="C")
    pdf.ln(10)
    pdf.set_font("Helvetica", size=12)
    for msg in messages:
        role = msg["role"].capitalize()
        # The built-in PDF fonts are Latin-1 only; replace unsupported
        # characters instead of letting FPDF raise on them.
        line = f"{role}: {msg['content']}".encode("latin-1", "replace").decode("latin-1")
        pdf.multi_cell(0, 8, line)
        pdf.ln(2)

    # PyFPDF returns a str from output(dest="S"), while fpdf2 returns a
    # bytearray; handle both so either library yields valid PDF bytes.
    out = pdf.output(dest="S")
    pdf_bytes = out.encode("latin-1") if isinstance(out, str) else bytes(out)
    buffer.write(pdf_bytes)
    buffer.seek(0)
    return buffer


st.title("AI Mentor (Gemini with LangChain)")
st.sidebar.write("Chat with your AI Mentor. Type questions or worries below.")

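# Streamlit reruns the whole script on every interaction; the conversation is
# kept in session_state and replayed at the top of each run.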
if "messages" not in st.session_state:
    st.session_state.messages = []

for msg in st.session_state.messages:
    st.chat_message(msg["role"]).write(msg["content"])

prompt_input = st.chat_input("Write your message here...")

if prompt_input:
    st.session_state.messages.append({"role": "user", "content": prompt_input})
    # Echo the new message now: the replay loop above ran before this message
    # was appended, so it would otherwise only show up on the next rerun.
    st.chat_message("user").write(prompt_input)
    with st.spinner("AI Mentor is thinking..."):
        response = ai_mentor(prompt_input)
    st.session_state.messages.append({"role": "assistant", "content": response})
    st.chat_message("assistant").write(response)

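# Offer the transcript as a downloadable PDF once there is any history.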
if st.session_state.messages:
    pdf_buffer = create_pdf_buffer(st.session_state.messages)
    st.download_button(
        label="Download chat history as PDF",
        data=pdf_buffer,
        file_name="chat_history.pdf",
        mime="application/pdf",
    )