|
|
import os |
|
|
import random |
|
|
import gradio as gr |
|
|
import torch |
|
|
from transformers import pipeline |
|
|
from datetime import datetime |
|
|
|
|
|
|
|
|
# Fortune categories offered in the UI dropdown; also the keys of the
# prompt table inside generate_fortune().
FORTUNE_TYPES = ["love", "career", "health", "wealth", "general"]

# The 22 Major Arcana tarot cards; one is drawn at random per reading.
TAROT_CARDS = [
    "The Fool", "The Magician", "The High Priestess", "The Empress", "The Emperor",
    "The Hierophant", "The Lovers", "The Chariot", "Strength", "The Hermit",
    "Wheel of Fortune", "Justice", "The Hanged Man", "Death", "Temperance",
    "The Devil", "The Tower", "The Star", "The Moon", "The Sun", "Judgement", "The World"
]

# The 12 western zodiac signs; one is picked at random per reading.
ZODIAC_SIGNS = [
    "Aries", "Taurus", "Gemini", "Cancer", "Leo", "Virgo",
    "Libra", "Scorpio", "Sagittarius", "Capricorn", "Aquarius", "Pisces"
]

# Module-level transformers text-generation pipeline, populated by load_model().
# Remains None if loading fails; generate_fortune() then serves canned fallbacks.
generator = None
|
|
|
|
|
def load_model():
    """Initialize the global text-generation pipeline, preferring GPU.

    Loads distilgpt2 into the module-level ``generator``. Returns True on
    success and False on failure (the app then falls back to canned fortunes).
    """
    global generator

    try:
        print("Loading model...")

        # Pipeline device convention: 0 = first CUDA device, -1 = CPU.
        on_gpu = torch.cuda.is_available()
        generator = pipeline(
            "text-generation",
            model="distilgpt2",
            device=0 if on_gpu else -1,
        )

        if on_gpu:
            print(f"Model loaded on GPU: {torch.cuda.get_device_name(0)}")
        else:
            print("Model loaded on CPU")
        return True

    except Exception as e:
        # Best-effort boundary: log and report failure instead of crashing
        # app startup; callers check the return value.
        print(f"Error loading model: {str(e)}")
        return False
|
|
|
|
|
def generate_fortune(fortune_type, user_input=""):
    """Generate a fortune based on type and user input.

    Args:
        fortune_type: One of FORTUNE_TYPES; unknown values fall back to "general".
        user_input: Optional free-form question appended to the model prompt.

    Returns:
        A ``(fortune_text, fortune_details)`` tuple, where ``fortune_details``
        is a dict of display metadata (tarot card, zodiac sign, lucky number,
        lucky color, element, timestamp).
    """
    prompts = {
        "love": "Tell me a romantic fortune about love and relationships:",
        "career": "Tell me a professional fortune about career and success:",
        "health": "Tell me a health-related fortune about wellness:",
        "wealth": "Tell me a financial fortune about money and prosperity:",
        "general": "Tell me a general fortune about life and future:"
    }

    # Random "mystic" elements woven into both the prompt and the details panel.
    tarot_card = random.choice(TAROT_CARDS)
    zodiac_sign = random.choice(ZODIAC_SIGNS)

    prompt_text = prompts.get(fortune_type, prompts["general"])
    full_prompt = f"{prompt_text} The {tarot_card} card is present. Zodiac sign: {zodiac_sign}. {user_input}"

    fortune_text = ""

    try:
        if generator:
            generated = generator(
                full_prompt,
                # FIX: the previous max_length=100 counted prompt AND output
                # tokens, so a long user question could leave no room for any
                # generated text (and max_length is deprecated for pipelines).
                # max_new_tokens bounds only the generated continuation.
                max_new_tokens=60,
                num_return_sequences=1,
                temperature=0.9,
                do_sample=True,
                truncation=True
            )

            fortune_text = generated[0]['generated_text']

            # The pipeline echoes the prompt; strip it so only the fortune remains.
            if fortune_text.startswith(full_prompt):
                fortune_text = fortune_text[len(full_prompt):].strip()

            # Guard against empty or degenerate generations.
            if not fortune_text or len(fortune_text) < 10:
                fortune_text = f"The {tarot_card} reveals mysteries for {zodiac_sign}. Good fortune awaits."

        else:
            # Model unavailable: serve a canned, randomized fortune instead.
            fallback_fortunes = [
                f"The {tarot_card} card shines brightly. As a {zodiac_sign}, your path is clear.",
                f"{tarot_card} guides you. {zodiac_sign} energy brings unexpected joy.",
                f"Through {tarot_card}'s wisdom, your {zodiac_sign} journey continues.",
                f"Stars align with {tarot_card}. {zodiac_sign}'s destiny unfolds.",
                f"{tarot_card} whispers secrets. {zodiac_sign}'s future is bright."
            ]
            fortune_text = random.choice(fallback_fortunes)

    except Exception as e:
        # Best-effort UX: never crash the UI over a generation failure.
        print(f"Error generating fortune: {str(e)}")
        fortune_text = f"The {tarot_card} card appears. {zodiac_sign}'s journey continues with promise."

    fortune_details = {
        "Tarot Card": tarot_card,
        "Zodiac Sign": zodiac_sign,
        "Lucky Number": str(random.randint(1, 99)),
        "Lucky Color": random.choice(["Red", "Blue", "Green", "Gold", "Purple", "Silver"]),
        "Element": random.choice(["Fire", "Water", "Earth", "Air"]),
        "Time": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    }

    return fortune_text, fortune_details
|
|
|
|
|
def get_fortune(fortune_type, question):
    """Gradio callback: return the fortune text and an HTML details panel."""
    fortune_text, details = generate_fortune(fortune_type, question)

    # Render the details dict as a simple key/value card.
    rows = "".join(
        f"<div><strong>{key}:</strong> {value}</div>" for key, value in details.items()
    )
    details_html = (
        "<div style='margin-top: 20px; padding: 15px; background: rgba(0,0,0,0.1); border-radius: 10px;'>"
        + rows
        + "</div>"
    )

    # Small footer showing which device inference runs on.
    device_label = 'GPU 🔥' if torch.cuda.is_available() else 'CPU ⚡'
    device_info = (
        "<div style='margin-top: 10px; font-size: 0.9em; color: #666;'>"
        + f"Running on {device_label}"
        + "</div>"
    )

    return fortune_text, details_html + device_info
|
|
|
|
|
|
|
|
# Load the model once at import time so the first request is not slow.
load_model()

# Page-level styling: constrain width, center the app, and apply a dark
# gradient background.
css = """
.gradio-container {
    max-width: 800px !important;
    margin: 0 auto !important;
}
body {
    background: linear-gradient(135deg, #0f0c29, #302b63, #24243e) !important;
}
"""
|
|
|
|
|
# Declarative UI: component creation order inside these context managers
# determines the rendered layout, so the statements below are order-sensitive.
with gr.Blocks(css=css, theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🔮 Mystic Fortune Teller 🔮")
    gr.Markdown("Ask the universe for guidance and receive your personalized fortune")

    with gr.Row():
        # Left column: user inputs (fortune category, optional question, submit).
        with gr.Column(scale=1):
            fortune_type = gr.Dropdown(
                choices=FORTUNE_TYPES,
                value="general",
                label="Fortune Type",
                info="Select the type of fortune you seek"
            )

            question = gr.Textbox(
                label="Your Question",
                placeholder="Ask your question here... (optional)",
                lines=3
            )

            submit_btn = gr.Button("Get Your Fortune 🔮", variant="primary")

        # Right column: generated fortune text plus the HTML details panel.
        with gr.Column(scale=2):
            fortune_output = gr.Textbox(
                label="Your Fortune",
                lines=6,
                interactive=False
            )

            details_output = gr.HTML(
                label="Fortune Details"
            )

    # Static GPU/CPU banner; the cuda checks run once, at app-construction time.
    status = gr.HTML(f"""
    <div style='padding: 10px; border-radius: 5px; background: {"#d4edda" if torch.cuda.is_available() else "#fff3cd"};
    border: 1px solid {"#c3e6cb" if torch.cuda.is_available() else "#ffeaa7"};
    color: {"#155724" if torch.cuda.is_available() else "#856404"};'>
    ⚡ Running on <strong>{'GPU' if torch.cuda.is_available() else 'CPU'}</strong>
    {f'({torch.cuda.get_device_name(0)})' if torch.cuda.is_available() else ''}
    </div>
    """)

    # Wire the submit button to the inference callback.
    submit_btn.click(
        fn=get_fortune,
        inputs=[fortune_type, question],
        outputs=[fortune_output, details_output]
    )

    # Clickable example prompts that pre-fill the two inputs.
    gr.Examples(
        examples=[
            ["love", "Will I find my soulmate this year?"],
            ["career", "What career path should I follow?"],
            ["wealth", "Will I achieve financial success?"],
            ["health", "What should I focus on for my wellbeing?"]
        ],
        inputs=[fortune_type, question]
    )
|
|
|
|
|
if __name__ == "__main__":
    # Bind to all interfaces so the app is reachable from outside a container;
    # the PORT environment variable overrides Gradio's default port 7860.
    demo.launch(
        server_name="0.0.0.0",
        server_port=int(os.environ.get("PORT", 7860)),
        share=False
    )