Ayush_Alter_Ego / app.py
Ayush239's picture
Update app.py
f0a0781 verified
raw
history blame
6.4 kB
# app.py — NVIDIA NIM + Tool Calling + Gradio Chatbot
# Model: meta/llama3-8b-instruct (supports OpenAI-style tools)
# Works with your nvapi-xxxx key on HuggingFace Spaces.
import os
import json
import requests
from pypdf import PdfReader
from openai import OpenAI
import gradio as gr
# ===============================
# ENVIRONMENT CONFIG
# ===============================
# NOTE: despite the OPENAI_API_KEY name, this is an NVIDIA "nvapi-..." key —
# NVIDIA NIM exposes an OpenAI-compatible API, so the OpenAI SDK is reused.
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY") # your nvapi-xxxx key
# OpenAI-compatible NVIDIA NIM inference endpoint.
BASE_URL = "https://integrate.api.nvidia.com/v1"
# Chosen because this model supports OpenAI-style tool calling (see header).
MODEL = "meta/llama3-8b-instruct"
# Optional Pushover credentials; push() degrades to printing when unset.
PUSHOVER_TOKEN = os.environ.get("PUSHOVER_TOKEN")
PUSHOVER_USER = os.environ.get("PUSHOVER_USER")
# Shared client for the whole app, pointed at NIM instead of api.openai.com.
client = OpenAI(api_key=OPENAI_API_KEY, base_url=BASE_URL)
# ===============================
# OPTIONAL: Pushover notification
# ===============================
def push(text):
    """Send *text* as a Pushover notification, best-effort.

    Falls back to printing when the Pushover credentials are not configured,
    and swallows (but logs) any network failure — a lost notification must
    never break a chat turn.
    """
    if not PUSHOVER_TOKEN or not PUSHOVER_USER:
        print("Pushover not configured:", text)
        return
    payload = {
        "token": PUSHOVER_TOKEN,
        "user": PUSHOVER_USER,
        "message": text,
    }
    try:
        requests.post(
            "https://api.pushover.net/1/messages.json",
            data=payload,
            timeout=10,
        )
    except Exception as exc:
        print("Pushover failed:", exc)
# ===============================
# TOOL IMPLEMENTATIONS
# ===============================
def record_user_details(email, name="Name not provided", notes="not provided"):
    """Tool: capture a prospective contact's details and notify via Pushover."""
    lead = {"status": "ok", "email": email, "name": name, "notes": notes}
    push(f"Lead captured → {name} | {email} | Notes: {notes}")
    return lead
def record_unknown_question(question):
    """Tool: log a question the assistant could not answer."""
    entry = {"status": "ok", "question": question}
    push(f"Unknown question: {question}")
    return entry
# Register tools globally
# NOTE(review): these two assignments are no-ops — the `def` statements above
# already bound both names in the module's global namespace, so the later
# globals().get(name) dispatch in Me.chat would find them without this.
globals()["record_user_details"] = record_user_details
globals()["record_unknown_question"] = record_unknown_question
# ===============================
# TOOL JSON DEFINITIONS
# ===============================
def _function_tool(name, description, properties, required):
    """Build one OpenAI-style function-tool schema entry."""
    return {
        "type": "function",
        "function": {
            "name": name,
            "description": description,
            "parameters": {
                "type": "object",
                "properties": properties,
                "required": required,
            },
        },
    }


# Tool schemas advertised to the model on every request; the names must
# match the module-level functions dispatched via globals() in Me.chat.
tools = [
    _function_tool(
        name="record_user_details",
        description="Record user's interest and email.",
        properties={
            "email": {"type": "string"},
            "name": {"type": "string"},
            "notes": {"type": "string"},
        },
        required=["email"],
    ),
    _function_tool(
        name="record_unknown_question",
        description="Record any question the assistant cannot answer.",
        properties={
            "question": {"type": "string"},
        },
        required=["question"],
    ),
]
# ===============================
# MAIN ASSISTANT CLASS
# ===============================
class Me:
    """Persona-backed assistant for Ayush Tyagi.

    Loads a short bio (me/summary.txt) and an optional LinkedIn PDF extract
    at construction time, then answers chat turns through the NVIDIA NIM
    OpenAI-compatible endpoint, executing any tool calls the model emits.
    """

    # Upper bound on tool-execution rounds per turn; guards against the
    # model requesting tool calls forever (original looped unbounded).
    _MAX_TOOL_ROUNDS = 5

    def __init__(self):
        self.name = "Ayush Tyagi"
        self.summary = ""       # contents of me/summary.txt, if present
        self.linkedin_text = "" # extracted text of the LinkedIn PDF, if present
        # Optional bio file; a missing file simply leaves the summary empty.
        if os.path.exists("me/summary.txt"):
            # FIX: close the handle deterministically (original leaked it).
            with open("me/summary.txt", "r", encoding="utf-8") as f:
                self.summary = f.read()
        # Optional LinkedIn PDF export; join the text of all pages.
        pdf_path = "me/Ayush_linkdin.pdf"
        if os.path.exists(pdf_path):
            reader = PdfReader(pdf_path)
            pages = [page.extract_text() for page in reader.pages]
            # extract_text() returns None for image-only pages — skip those.
            self.linkedin_text = "\n\n".join(t for t in pages if t)

    def system_prompt(self):
        """Return the system prompt embedding the loaded bio material."""
        return f"""
You are acting as {self.name}. You answer questions about his background,
skills, experience, portfolio, and career.
If you DO NOT know something:
➡ Use the tool: record_unknown_question
If the user is interested in working with Ayush:
➡ Ask for their email and call: record_user_details
Be friendly, confident, and professional.
### Summary:
{self.summary}
### LinkedIn Extract:
{self.linkedin_text}
"""

    @staticmethod
    def _history_to_messages(history):
        """Normalize Gradio history into OpenAI-style messages.

        Accepts both history formats Gradio may deliver: (user, bot) pairs
        and {"role", "content"} dicts. Unrecognized items are skipped.
        """
        out = []
        for item in history:
            if isinstance(item, (list, tuple)) and len(item) == 2:
                user_msg, bot_msg = item
                if user_msg:
                    out.append({"role": "user", "content": user_msg})
                if bot_msg:
                    out.append({"role": "assistant", "content": bot_msg})
            elif isinstance(item, dict) and "role" in item and "content" in item:
                out.append({"role": item["role"], "content": item["content"]})
        return out

    @staticmethod
    def _run_tool(tool_call):
        """Execute one tool call; return its {"role": "tool", ...} result message."""
        name = tool_call.function.name
        tool_fn = globals().get(name)
        if tool_fn is None:
            # FIX: original crashed (TypeError: 'NoneType' not callable) on an
            # unknown tool name; report the error back to the model instead.
            result = {"status": "error", "detail": f"unknown tool: {name}"}
        else:
            try:
                args = json.loads(tool_call.function.arguments)
                result = tool_fn(**args)
            except Exception as exc:
                # Malformed arguments or a tool failure should not kill the
                # chat turn — surface it to the model as a tool error.
                result = {"status": "error", "detail": str(exc)}
        return {
            "role": "tool",
            "tool_call_id": tool_call.id,
            "content": json.dumps(result),
        }

    def chat(self, message, history):
        """Run one chat turn, executing any tool calls the model requests.

        Parameters:
            message: the user's latest message (str).
            history: prior turns in either Gradio history format.

        Returns:
            The assistant's final text reply (str).
        """
        messages = [{"role": "system", "content": self.system_prompt()}]
        messages.extend(self._history_to_messages(history))
        messages.append({"role": "user", "content": message})

        msg = None
        for _ in range(self._MAX_TOOL_ROUNDS):
            response = client.chat.completions.create(
                model=MODEL,
                messages=messages,
                tools=tools,
                tool_choice="auto",
                max_tokens=600,
            )
            choice = response.choices[0]
            msg = choice.message

            # ---- NORMAL RESPONSE ----
            if choice.finish_reason != "tool_calls":
                return msg.content

            # ---- TOOL CALL ----
            # Echo the assistant's tool-call message into the transcript.
            # FIX: pydantic v2 (used by openai>=1.x) deprecates .dict();
            # prefer model_dump() when available.
            if hasattr(msg, "model_dump"):
                messages.append(msg.model_dump(exclude_none=True))
            else:
                messages.append(msg.dict())
            for tool_call in msg.tool_calls:
                messages.append(self._run_tool(tool_call))
            # Loop: let the model continue after tool execution.

        # Tool budget exhausted — return whatever text we last saw.
        return (msg.content if msg is not None else None) or \
            "Sorry, I couldn't complete that request."
# ===============================
# GRADIO APP
# ===============================
# Instantiate the persona once; Gradio reuses the bound method across turns.
me = Me()

# type="messages" delivers history as {"role", "content"} dicts, one of the
# two formats Me.chat accepts.
ui = gr.ChatInterface(
    fn=me.chat,
    title="Ayush Tyagi — Personal Assistant",
    type="messages",
)

if __name__ == "__main__":
    # Bind to all interfaces; honor the platform-provided PORT when set.
    port = int(os.environ.get("PORT", 7860))
    ui.launch(server_name="0.0.0.0", server_port=port)