Spaces:
Build error
Build error
File size: 4,336 Bytes
05f1d97 da466c6 05f1d97 eeae524 1e8d370 05f1d97 6fe40f1 05f1d97 1e8d370 05f1d97 eeae524 da466c6 6fe40f1 eeae524 da466c6 6fe40f1 eeae524 da466c6 6fe40f1 da466c6 1e8d370 eeae524 da466c6 1e8d370 eeae524 da466c6 1e8d370 eeae524 1e8d370 eeae524 1e8d370 da466c6 eeae524 da466c6 05f1d97 eeae524 da466c6 eeae524 1e8d370 da466c6 eeae524 da466c6 eeae524 da466c6 1e8d370 da466c6 eeae524 1e8d370 da466c6 05f1d97 da466c6 1e8d370 eeae524 1e8d370 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 |
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import re
# --- Model setup (runs once at import time) ---
# DialoGPT-small: a lightweight conversational GPT-2 variant from Microsoft.
model_name = "microsoft/DialoGPT-small"
tokenizer = AutoTokenizer.from_pretrained(model_name)
# DialoGPT ships without a pad token; reuse EOS so tokenization/generation
# does not warn or fail when padding is needed.
tokenizer.pad_token = tokenizer.eos_token
model = AutoModelForCausalLM.from_pretrained(model_name)
# Mirror the pad choice into the model config for generate().
model.config.pad_token_id = tokenizer.eos_token_id
# System prompt prepended to every generative query; gives the model company
# context (services, stack, pricing, contact) so answers stay on-topic.
SYSTEM_PROMPT = """You are MediaDecode's AI assistant, a professional IT services and software development company.
Provide concise, technical yet friendly answers about:
- Web/Mobile Development (Java, PHP, .NET, C++)
- Product Design & UI/UX
- Staff Augmentation
- Pricing Plans ($129.99-$189.99/month)
- Contact: +1 (647) 928-8768 | info@mediadecode.com
If unsure, direct users to contact sales/support."""
# Canned FAQ answers: case-insensitive regex -> markdown reply. Checked in
# respond() before the model is invoked so common questions get fast,
# deterministic answers.
# NOTE(review): the stray "π"/"β" characters in these strings look like
# mis-encoded emoji (UTF-8 mojibake) — confirm against the original source.
FAQ_RESPONSES = {
    r"(?i)(contact|phone|email|address)": "π **Contact Us:**\n- Canada: +1 (647) 928-8768\n- Pakistan: +92 307 5090706\nβοΈ **Email:** info@mediadecode.com\nπ **Addresses:** [See our website](https://mediadecode.com)",
    r"(?i)(service|offer|provide)": "π **Our Services:**\n1. Custom Software Development\n2. Web/Mobile Apps (Java/PHP/.NET)\n3. UI/UX Design\n4. Staff Augmentation\n5. AI/ML Solutions\n\nπ‘ See all: [Services Page](https://mediadecode.com/services)",
    r"(?i)(price|plan|cost)": "π° **Pricing Plans:**\n- Basic: $129.99/mo\n- Economy: $159.99/mo\n- Premium: $189.99/mo\n\nIncludes consulting, 24/7 monitoring, and security. [Learn more](https://mediadecode.com/pricing)",
    r"(?i)(portfolio|projects|showcase)": "π¨ **Portfolio:** We've delivered 100+ projects in healthcare, e-commerce, and more. Explore: [Our Showcase](https://mediadecode.com/showcase)",
    r"(?i)(technology|tech stack|tools)": "βοΈ **Technologies:**\n- Web: Java, PHP, .NET, C++\n- Mobile: iOS, Android\n- AI/ML, IoT, Wearables\n\nπ Full list: [Tech Stack](https://mediadecode.com/technology)"
}
# Fallback when the model produces an empty/degenerate reply.
DEFAULT_RESPONSE = """π€ I couldn't find a specific answer. For detailed inquiries:
- Call: +1 (647) 928-8768
- Email: info@mediadecode.com
- Visit: [Support Page](https://mediadecode.com/contact)"""
def respond(message, chat_history):
    """Answer a support query.

    Resolution order: empty-input guard -> greeting -> canned FAQ regexes ->
    DialoGPT generation with the company system prompt -> fallback message.

    Args:
        message: Raw user text.
        chat_history: Prior turns. Currently unused; kept so the Gradio
            callback signature stays stable.

    Returns:
        A markdown-formatted reply string.
    """
    message = message.strip().lower()

    if not message:
        return "Please enter a valid question or query."

    # Match greetings on word boundaries; a plain substring test would fire
    # on words that merely contain "hi"/"hey" (e.g. "which", "think", "they").
    if re.search(r"\b(hi|hello|hey)\b", message):
        return "Hello! Welcome to MediaDecode support. How can I help you today?"

    # Canned FAQ answers take priority over the generative model.
    for pattern, response in FAQ_RESPONSES.items():
        if re.search(pattern, message):
            return response

    # Fall back to the language model with the company context prepended.
    prompt = f"{SYSTEM_PROMPT}\nUser: {message}\nAI:"
    inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=512)
    with torch.no_grad():
        outputs = model.generate(
            inputs.input_ids,
            # Pass the mask explicitly: pad == eos here, so generate() cannot
            # infer it reliably from the input ids alone.
            attention_mask=inputs.attention_mask,
            # max_new_tokens bounds only the reply; the previous max_length=200
            # counted the (often longer) prompt and could leave no room to
            # generate anything at all.
            max_new_tokens=100,
            pad_token_id=tokenizer.eos_token_id,
            temperature=0.7,
            do_sample=True,
        )
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # The decoded text echoes the prompt; keep only what follows "AI:".
    response = response.split("AI:")[-1].strip()

    # Guard against empty or degenerate generations.
    if not response or len(response) < 5 or "i don't know" in response.lower():
        return DEFAULT_RESPONSE
    return response
# --- Gradio interface -------------------------------------------------------
# Two-step chat flow: msg.submit first echoes the user turn into the chatbot
# (user), then fills in the reply (bot). A stray trailing "|" after
# demo.launch() (extraction artifact) has been removed.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("""
# π MediaDecode Support Bot
**Ask about:** Services | Pricing | Technologies | Contact
*Powered by AI + Company Knowledge Base*
""")
    chatbot = gr.Chatbot(height=400, label="Chat History")
    msg = gr.Textbox(label="Your Query", placeholder="e.g., What's your pricing for Java development?")
    clear = gr.Button("Clear Chat")

    def user(user_message, history):
        # Clear the textbox and append the new turn with the reply pending.
        return "", history + [[user_message, None]]

    def bot(history):
        # Compute the reply for the newest turn and fill it into history.
        user_message = history[-1][0]
        bot_response = respond(user_message, history[:-1])
        history[-1][1] = bot_response
        return history

    msg.submit(user, [msg, chatbot], [msg, chatbot]).then(
        bot, chatbot, chatbot
    )
    # Resetting the Chatbot value to None empties the displayed history.
    clear.click(lambda: None, None, chatbot, queue=False)

demo.launch()