import os
import gradio as gr
from groq import Groq
from dotenv import load_dotenv
from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
# Load API Key
# Pull environment variables from a local .env file into os.environ (no-op if the file is absent).
load_dotenv()
# NOTE(review): secret is looked up under the name "Freelancing_key" — confirm this
# matches the deployment's secret name; os.getenv returns None if it is unset.
GROQ_API_KEY = os.getenv("Freelancing_key")
# Module-level Groq client used by generate_response; a None api_key fails at request time.
client = Groq(api_key=GROQ_API_KEY)
# ===============================
# AI RESPONSE FUNCTION (NEW FORMAT)
# ===============================
def generate_response(task, user_input, tone, history):
    """Send the user's request to the Groq chat model and return updated history.

    Args:
        task: Selected task name (e.g. "Proposal Writer").
        user_input: Raw request text typed by the user.
        tone: Desired response tone (e.g. "Professional").
        history: Prior chat turns as a list of {"role", "content"} dicts,
            or None on the first call.

    Returns:
        (history, history): the updated message list, duplicated so it can
        feed both the Chatbot component and the State component.
    """
    if history is None:
        history = []

    # The model receives the structured prompt; the visible history below
    # stores only the raw user_input so the chat window stays readable.
    prompt = f"""
Task: {task}
Tone: {tone}
User Request: {user_input}
"""
    messages = [
        {"role": "system", "content": "You are a professional AI business assistant."}
    ]
    # Replay prior turns so the model keeps conversational context.
    messages.extend(history)
    messages.append({"role": "user", "content": prompt})

    response = client.chat.completions.create(
        model="llama-3.3-70b-versatile",
        messages=messages,
        temperature=0.7,
    )
    answer = response.choices[0].message.content

    # Append in the openai-style "messages" format expected by gr.Chatbot.
    history.append({"role": "user", "content": user_input})
    history.append({"role": "assistant", "content": answer})
    return history, history
# ===============================
# PDF DOWNLOAD FUNCTION
# ===============================
def download_pdf(history):
    """Render the chat history to a PDF file and return its path.

    Args:
        history: List of {"role", "content"} message dicts (may be empty or None).

    Returns:
        Path of the generated PDF ("AI_Agent_Chat.pdf" in the working directory).
    """
    # Paragraph parses mini-HTML markup, so raw "<", ">", "&" in chat text
    # would make doc.build raise — escape message content before embedding it.
    from xml.sax.saxutils import escape

    file_path = "AI_Agent_Chat.pdf"
    doc = SimpleDocTemplate(file_path)
    styles = getSampleStyleSheet()

    elements = []
    for message in history or []:  # tolerate None state (no messages yet)
        role = message["role"]
        content = escape(message["content"])
        elements.append(
            Paragraph(f"<b>{role.capitalize()}:</b> {content}", styles["Normal"])
        )
        elements.append(Spacer(1, 0.4 * inch))

    doc.build(elements)
    return file_path
# ===============================
# GRADIO UI (NEW FORMAT)
# ===============================
with gr.Blocks() as demo:
    gr.Markdown("# 💰 Freelancing AI Agent Pro")

    # The click handlers exchange openai-style {"role", "content"} dicts,
    # which requires the Chatbot to be in "messages" mode — the legacy
    # tuple mode would reject (or warn on) dict-shaped history.
    chatbot = gr.Chatbot(type="messages")
    state = gr.State([])

    task = gr.Dropdown(
        ["Proposal Writer", "Email Writer", "Marketing Post", "Business Idea Generator"],
        label="Select Task",
    )
    tone = gr.Dropdown(
        ["Professional", "Friendly", "Confident", "Persuasive"],
        label="Select Tone",
    )
    user_input = gr.Textbox(label="Enter Your Request")

    generate_btn = gr.Button("Generate")
    download_btn = gr.Button("Download as PDF")
    # Declare the output component in the layout instead of instantiating it
    # inline inside .click(), so the download target is a visible, named
    # component rather than an anonymous one created at wiring time.
    pdf_file = gr.File(label="Chat PDF")

    generate_btn.click(
        generate_response,
        inputs=[task, user_input, tone, state],
        outputs=[chatbot, state],
    )
    download_btn.click(
        download_pdf,
        inputs=[state],
        outputs=pdf_file,
    )

demo.launch()