Spaces:
Sleeping
Sleeping
File size: 8,999 Bytes
8e00ad1 6fe5e5e 8e00ad1 7bc7c78 09b4c4b 40b9bc6 98db39e 11b31df 8e00ad1 7cb19ca 7a8243c 09b4c4b 7a8243c 6fe5e5e 98db39e 09b4c4b 7cb19ca 09b4c4b 11b31df ed08a0f 11b31df 09b4c4b 11b31df 6fe5e5e 11b31df 6fe5e5e 98db39e 6fe5e5e 11b31df 6fe5e5e ed08a0f 11b31df 98db39e 7cb19ca 11b31df 6fe5e5e 11b31df 6fe5e5e ed08a0f 11b31df 6fe5e5e 11b31df 6fe5e5e 11b31df 6fe5e5e ed08a0f 6fe5e5e 11b31df ed08a0f 6fe5e5e 11b31df 6fe5e5e 11b31df 6fe5e5e 7cb19ca 11b31df ed08a0f ac574a7 7a8243c 6fe5e5e 9fd1ca7 11b31df 9fd1ca7 11b31df ed08a0f 6fe5e5e 11b31df 09b4c4b 11b31df 09b4c4b f938d63 09b4c4b 6fe5e5e 40b9bc6 09b4c4b 7a8243c 09b4c4b 6fe5e5e 902009d 7cb19ca 902009d 11b31df 902009d 11b31df 902009d 09b4c4b 7a8243c 09b4c4b 7a8243c 902009d 09b4c4b 902009d 7a8243c ed08a0f 8e00ad1 7a8243c 98db39e 7a8243c 11b31df 8e00ad1 98db39e 8e00ad1 11b31df 8e00ad1 cd816ec b6466e8 11b31df cd816ec 902009d 8e00ad1 11b31df 7a8243c 11b31df 8e00ad1 902009d 11b31df ac574a7 902009d ac574a7 7a8243c 6fe5e5e 11b31df 7a8243c 8e00ad1 b6466e8 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 |
import gradio as gr
from huggingface_hub import InferenceClient, HfFolder
import os
import requests
import random
import re
import tempfile
import json
# --- CORE FUNCTIONS ---
def get_client():
    """Resolve an HF API token and build an inference client.

    Returns:
        (InferenceClient, None) when a token is found, otherwise
        (None, markdown_error_message) with setup instructions.
    """
    # Prefer the Space secret; fall back to any locally cached CLI token.
    token = os.environ.get("HF_TOKEN")
    if not token:
        try:
            token = HfFolder.get_token()
        except Exception:
            pass  # best effort — missing cache just means no token
    if token:
        return InferenceClient(token=token), None
    setup_help = "β **HuggingFace Token Required**\n\n**Setup:**\n1. Go to Space Settings β Repository Secrets\n2. Add secret: Name=`HF_TOKEN`, Value=(your HF token)\n3. Get token: https://huggingface.co/settings/tokens\n4. Restart Space"
    return None, setup_help
def generate_meme_content(idea: str):
    """
    In a single AI call, choose the best template AND generate the text.

    Tries each model in MODELS_TO_TRY until one returns a usable JSON caption.

    Returns:
        (template_name, top_text, bottom_text, error, model_used) — exactly one
        of template_name / error is None.
    """
    client, error = get_client()
    if error:
        return None, None, None, error, None
    # Describe every known template so the model can pick one by exact name.
    template_descriptions = "\n".join(f"- {name}: {desc}" for name, desc in TEMPLATE_GUIDANCE.items())
    MODELS_TO_TRY = ["mistralai/Mistral-7B-Instruct-v0.2", "HuggingFaceH4/zephyr-7b-beta"]
    failed_models = []
    for model_id in MODELS_TO_TRY:
        try:
            prompt = f"""You are an AI expert in meme culture. Your task is to analyze an idea, choose the best meme template, and generate a funny caption.
**1. Analyze the user's idea:** "{idea}"
**2. Choose the single best meme template from this list:**
{template_descriptions}
**3. Generate a funny, two-line caption for the chosen template.**
**4. Format your response as a single, valid JSON object with three keys: "template", "top_text", "bottom_text". Do not add any extra text or explanations outside of the JSON object.**
Example Response:
{{
"template": "Drake",
"top_text": "Manually selecting a meme template",
"bottom_text": "Letting the AI choose the template automatically"
}}
Your JSON response:"""
            messages = [{"role": "user", "content": prompt}]
            response = client.chat_completion(
                messages, model=model_id, max_tokens=150, temperature=0.8, stream=False
            )
            response_text = response.choices[0].message.content
            # Non-greedy regex grabs the first {...} object in the reply, ignoring
            # any chatter the model adds around it.
            json_match = re.search(r'\{.*?\}', response_text, re.DOTALL)
            if not json_match:
                failed_models.append(f"{model_id.split('/')[-1]} (bad format)")
                continue
            parsed_json = json.loads(json_match.group(0))
            template = parsed_json.get("template")
            top_text = parsed_json.get("top_text")
            bottom_text = parsed_json.get("bottom_text")
            # Reject incomplete captions or templates we have no Imgflip ID for.
            if not all([template, top_text, bottom_text]) or template not in MEME_TEMPLATES:
                failed_models.append(f"{model_id.split('/')[-1]} (invalid content)")
                continue
            return template, top_text, bottom_text, None, model_id
        except json.JSONDecodeError as e:
            # BUG FIX: malformed JSON previously fell into the generic handler,
            # whose `"invalid json" in error_msg` check never matches the real
            # JSONDecodeError message ("Expecting value: ..."), so a single bad
            # reply aborted the whole fallback chain. Treat it as retryable.
            failed_models.append(f"{model_id.split('/')[-1]} ({type(e).__name__})")
            continue
        except Exception as e:
            error_msg = str(e).lower()
            # Retryable infrastructure failures: model missing, overloaded, or warming up.
            if "404" in error_msg or "503" in error_msg or "is currently loading" in error_msg:
                failed_models.append(f"{model_id.split('/')[-1]} ({type(e).__name__})")
                continue
            return None, None, None, f"β **AI Error:** {str(e)[:250]}", model_id
    return None, None, None, f"β **All AI Models Are Offline or Failing**\n\n**Models Tried:** {', '.join(failed_models)}", None
def create_meme(idea: str):
    """Main function to generate the complete meme.

    Pipeline: validate input → AI picks template + caption → render via the
    Imgflip caption_image API → download the image to a temp file for Gradio.

    Returns:
        (image_filepath, status_message); image_filepath is None on any failure.
    """
    if not idea or len(idea.strip()) < 3:
        return None, "β Please enter a meme idea (at least 3 characters)!"
    imgflip_user = os.environ.get("IMGFLIP_USERNAME")
    imgflip_pass = os.environ.get("IMGFLIP_PASSWORD")
    if not imgflip_user or not imgflip_pass:
        return None, "β **ImgFlip Credentials Required in Secrets**"
    template_name, top, bottom, error, model_used = generate_meme_content(idea)
    if error:
        return None, error
    # generate_meme_content guarantees template_name is a MEME_TEMPLATES key.
    template_id = MEME_TEMPLATES.get(template_name)
    url = "https://api.imgflip.com/caption_image"
    payload = {
        'template_id': template_id, 'username': imgflip_user,
        'password': imgflip_pass, 'text0': top, 'text1': bottom
    }
    try:
        response = requests.post(url, data=payload, timeout=20)
        response.raise_for_status()
        data = response.json()
        if not data.get('success'):
            return None, f"β **ImgFlip API Error:** {data.get('error_message', 'Unknown error')}"
        meme_url = data['data']['url']
        img_response = requests.get(meme_url, timeout=20)
        img_response.raise_for_status()
        # Persist to a temp file so the Gradio Image (type="filepath") can serve it.
        with tempfile.NamedTemporaryFile(delete=False, suffix=".jpg") as tmpfile:
            tmpfile.write(img_response.content)
            temp_path = tmpfile.name
        # BUG FIX: the success header f-string was previously broken across a raw
        # newline (unterminated literal → SyntaxError); joined into adjacent parts.
        status_message = (f"β **Success!**\n\n"
                          f"π§ **AI Chose:** {template_name}\n"
                          f"π **Top Text:** {top}\n"
                          f"π **Bottom Text:** {bottom}\n\n"
                          f"π€ **Model Used:** {model_used.split('/')[-1] if model_used else 'N/A'}")
        return temp_path, status_message
    except requests.exceptions.RequestException as e:
        return None, f"β **Network Error:** Could not connect to ImgFlip API. {str(e)}"
    except Exception as e:
        return None, f"β **An unexpected error occurred:** {str(e)}"
# --- CONFIGURATION & UI (No changes needed) ---
# Imgflip template IDs, keyed by the human-readable names the AI must choose
# from. Keys must stay in sync with TEMPLATE_GUIDANCE.
MEME_TEMPLATES = {
    "Drake": "181913649",
    "Distracted Boyfriend": "112126428",
    "Two Buttons": "87743020",
    "Expanding Brain": "93895088",
    "Success Kid": "61544",
    "Batman Slapping Robin": "438680",
    "Change My Mind": "129242436",
    "Woman Yelling at a Cat": "188390779",
    "Surprised Pikachu": "155067746",
}
# One-line usage hints per template; these strings are injected verbatim into
# the model prompt so it can match the user's idea to a template.
TEMPLATE_GUIDANCE = {
    "Drake": "Represents choosing one thing (good) over another (bad). Good for showing preference.",
    "Distracted Boyfriend": "Represents being tempted by something new while neglecting something you already have.",
    "Two Buttons": "Represents a difficult choice, a dilemma, or inner conflict.",
    "Expanding Brain": "Shows increasing levels of enlightenment or absurdity on a topic.",
    "Success Kid": "Represents a small victory, unexpected success, or relief.",
    "Batman Slapping Robin": "Represents a sharp rebuke or correction of a silly idea.",
    "Change My Mind": "For presenting a controversial opinion that you are confident about.",
    "Woman Yelling at a Cat": "Represents a misunderstanding, with one side angry and the other confused.",
    "Surprised Pikachu": "Represents feigned surprise at an obvious outcome.",
}
# Click-to-fill example prompts shown under the input box; list-of-lists
# because gr.Examples expects one inner list per input component.
examples = [
    ["When you fix a bug you don't understand"],
    ["My plans for the weekend vs. what I actually do"],
    ["Saying you'll just have one slice of pizza"],
    ["Me pretending to be productive in a Zoom meeting"],
]
# --- UI LAYOUT ---
# Two-column Gradio app: idea input + help accordion on the left, the
# generated meme and a status panel on the right. `demo` is launched by the
# __main__ guard at the bottom of the file.
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="sky"), title="Intelligent AI Meme Generator") as demo:
    # Static header banner (purely cosmetic HTML).
    gr.HTML("""
<div style='text-align: center; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
padding: 30px; border-radius: 15px; color: white; margin-bottom: 20px;'>
<h1>π§ Intelligent AI Meme Generator</h1>
<h3>Enter an idea and let the AI choose the best meme template for you!</h3>
</div>
""")
    with gr.Row():
        with gr.Column(scale=2):
            # Input side: free-text idea and the single action button.
            idea_input = gr.Textbox(label="π¨ Your Meme Idea", placeholder="Example: When the CI/CD pipeline finally passes...", lines=4)
            generate_button = gr.Button("π Generate Meme", variant="primary", size="lg")
            with gr.Accordion("How does this work?", open=False):
                gr.Markdown("""
This app is **intelligent and resilient**. When you click generate:
1. The AI analyzes your idea and chooses the best meme template from its knowledge base.
2. It then generates a funny caption for that specific template.
3. It automatically finds a working AI model from a list of free options.
""")
        with gr.Column(scale=1):
            # Output side: rendered meme (served from a temp filepath) + details.
            output_image = gr.Image(label="πΌοΈ Your Generated Meme", type="filepath", show_download_button=True)
            output_status = gr.Textbox(label="π Status & Details", lines=5, show_copy_button=True)
    gr.Examples(examples=examples, inputs=[idea_input], label="π‘ Meme Ideas to Try")
    # Wire the button to the end-to-end pipeline defined above.
    generate_button.click(
        fn=create_meme,
        inputs=[idea_input],
        outputs=[output_image, output_status]
    )
if __name__ == "__main__":
    # Launch the Gradio server when run directly (e.g. on a HF Space).
    demo.launch()