# WWJD / app2.py
# (uploaded by zmije1kw via huggingface_hub — commit 3aa2e5a, verified)
from dotenv import load_dotenv
from openai import OpenAI
from pypdf import PdfReader
import gradio as gr
# Load environment variables (e.g. OPENAI_API_KEY) before creating the client.
load_dotenv(override=True)
openai = OpenAI()

# Context text for the chatbot. An earlier revision extracted a resume PDF
# here; that code is kept below for reference.
# reader = PdfReader("me/KevinZmijewski_Resume_Auto2025.pdf")
# auto_resume = ""
# for page in reader.pages:
#     text = page.extract_text()
#     if text:
#         auto_resume += text

# BUG FIX: the original opened "me/bibleText.rtf" in "w" mode and wrote the
# always-empty auto_resume string, truncating the data file to zero bytes
# before reading it back (so summary was always ""). Read-only now, with a
# guard so a missing file degrades to an empty summary instead of crashing.
try:
    with open("me/bibleText.rtf", "r", encoding="utf-8") as f:
        summary = f.read()
except FileNotFoundError:
    summary = ""
print(summary)
# Persona used throughout the system prompt.
name = "Jesus Christ"

# System prompt: fixes the persona, tone, and answering rules for the model.
# (Removed the bare `system_prompt` expression statement that followed — it
# was notebook residue with no effect in a script.)
system_prompt = f"You are acting as {name}. You are answering questions on {name}'s bible, \
particularly questions related to {name}'s life, lessons, and experience. \
Your responsibility is to represent {name} for interactions as faithfully as possible. \
You are given a summary of {name}'s background via the bible which you can use to answer questions. \
Be courteous and morally correct, as if talking to a potential follower or future follower who came across the bible. \
If you don't know the answer, say so, but always stay in character as {name}. \
Quote a bible verse associated with the question if possible, but only if it's relevant to the question."
system_prompt += f"\n\n## Summary:\n{summary}\n\n"
system_prompt += f"With this context, please chat with the user, always staying in character as {name}."
def chat(message, history):
    """Gradio chat handler: answer `message` in character, given `history`.

    `history` is a list of OpenAI-style message dicts (Gradio `type="messages"`);
    the system prompt is prepended and the new user turn appended before the
    completion call.
    """
    conversation = [{"role": "system", "content": system_prompt}]
    conversation.extend(history)
    conversation.append({"role": "user", "content": message})
    completion = openai.chat.completions.create(
        model="gpt-4o-mini",
        messages=conversation,
    )
    return completion.choices[0].message.content
# Build the chat UI around the handler and start the Gradio server.
app = gr.ChatInterface(chat, type="messages")
app.launch()