faizee07 committed on
Commit
40b9bc6
·
verified ·
1 Parent(s): 7a8243c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -59
app.py CHANGED
@@ -3,6 +3,7 @@ from huggingface_hub import InferenceClient
3
  import os
4
  import requests
5
  import random
 
6
 
7
  # --- CORE FUNCTIONS ---
8
 
@@ -20,52 +21,45 @@ def generate_meme_text(idea: str, model: str):
20
  return None, None, error
21
 
22
  try:
23
- # A robust prompt that works well with smaller models
24
- prompt = f"""You are a meme generator. Create funny text for a meme about "{idea}".
25
-
26
- Respond ONLY in this exact format:
27
- TOP: [Top text, max 8 words]
28
- BOTTOM: [Bottom text, max 8 words]
29
-
30
- Example for "code works on first try":
31
- TOP: My code works
32
- BOTTOM: And I don't know why
33
-
34
- Now, generate for "{idea}":
35
- TOP:"""
36
 
37
  # Using text_generation which is compatible with these models
38
  response = client.text_generation(
39
  prompt,
40
  model=model,
41
  max_new_tokens=100,
42
- temperature=0.8,
43
  return_full_text=False,
44
  do_sample=True,
 
45
  )
46
 
47
  # --- Robust Parsing Logic ---
48
- full_response = "TOP:" + response
49
- lines = full_response.strip().split('\n')
50
-
51
  top_text = ""
52
  bottom_text = ""
53
 
54
- for line in lines:
55
- line = line.strip()
56
- if line.startswith("TOP:") and not top_text:
57
- top_text = line.replace("TOP:", "").strip().strip('[]"\'.,')
58
- elif line.startswith("BOTTOM:") and not bottom_text:
59
- bottom_text = line.replace("BOTTOM:", "").strip().strip('[]"\'.,')
 
 
60
 
61
  # Fallback if AI gives a weird response
62
  if not top_text or not bottom_text:
63
- words = idea.split()
64
- mid_point = max(1, len(words) // 2)
65
- top_text = ' '.join(words[:mid_point])
66
- bottom_text = ' '.join(words[mid_point:])
67
-
68
- # Ensure text is not empty
 
 
 
 
69
  if not top_text.strip(): top_text = "Top Text"
70
  if not bottom_text.strip(): bottom_text = "Bottom Text"
71
 
@@ -76,7 +70,9 @@ TOP:"""
76
  if "rate limit" in error_msg:
77
  return None, None, "❌ **Rate Limit Exceeded**\n\nWait 60 seconds and try again. Free tier has limits."
78
  elif "503" in error_msg:
79
- return None, None, f"❌ **Model is Loading**\n\nWait 30 seconds for '{model}' to wake up and try again."
 
 
80
  else:
81
  return None, None, f"❌ **AI Error:** {error_msg[:200]}"
82
 
@@ -94,7 +90,6 @@ def create_meme(idea: str, template: str, model: str):
94
  template_id = MEME_TEMPLATES.get(template, "181913649") # Default to Drake
95
  url = "https://api.imgflip.com/caption_image"
96
 
97
- # Using public credentials for ImgFlip API
98
  payload = {
99
  'template_id': template_id,
100
  'username': 'imgflip_huggingface',
@@ -105,13 +100,12 @@ def create_meme(idea: str, template: str, model: str):
105
 
106
  try:
107
  response = requests.post(url, data=payload, timeout=15)
108
- response.raise_for_status() # Raise error for bad responses
109
  data = response.json()
110
 
111
  if data.get('success'):
112
  meme_url = data['data']['url']
113
 
114
- # Download image locally to display in Gradio
115
  img_response = requests.get(meme_url, timeout=15)
116
  if img_response.status_code == 200:
117
  temp_path = f"/tmp/meme_{random.randint(1000, 9999)}.jpg"
@@ -138,11 +132,11 @@ def create_meme(idea: str, template: str, model: str):
138
 
139
  # --- CONFIGURATION & UI ---
140
 
141
- # βœ… WORKING MODELS YOU PROVIDED
142
  MODELS = {
143
- "Mistral 7B (Fast & Reliable)": "mistralai/Mistral-7B-Instruct-v0.3",
144
- "Zephyr 7B": "HuggingFaceH4/zephyr-7b-beta",
145
- "Phi-3 Mini": "microsoft/Phi-3-mini-4k-instruct"
146
  }
147
 
148
  # Popular meme templates
@@ -159,10 +153,9 @@ MEME_TEMPLATES = {
159
 
160
  # Example prompts
161
  examples = [
162
- ["When you finally fix the bug at 3 AM", "Success Kid", "Mistral 7B (Fast & Reliable)"],
163
- ["Junior dev vs Senior dev looking at same error", "Drake", "Mistral 7B (Fast & Reliable)"],
164
- ["My code in development vs in production", "Distracted Boyfriend", "Zephyr 7B"],
165
- ["Trying to explain AI to my parents", "Expanding Brain", "Phi-3 Mini"],
166
  ]
167
 
168
  # Gradio UI
@@ -172,7 +165,7 @@ with gr.Blocks(theme=gr.themes.Soft(), title="AI Meme Generator") as demo:
172
  <div style='text-align: center; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
173
  padding: 30px; border-radius: 15px; color: white; margin-bottom: 20px;'>
174
  <h1>πŸ₯Έ AI Meme Generator</h1>
175
- <h3>Powered by Actually Working HuggingFace Models πŸ€—</h3>
176
  </div>
177
  """)
178
 
@@ -180,32 +173,22 @@ with gr.Blocks(theme=gr.themes.Soft(), title="AI Meme Generator") as demo:
180
 
181
  with gr.Row():
182
  with gr.Column(scale=2):
183
- idea_input = gr.Textbox(
184
- label="🎨 Your Meme Idea",
185
- placeholder="Example: When the client says 'just one small change'...",
186
- lines=2,
187
- )
188
-
189
  with gr.Row():
190
- template_dropdown = gr.Dropdown(
191
- choices=list(MEME_TEMPLATES.keys()), value="Drake", label="πŸ–ΌοΈ Meme Template"
192
- )
193
- model_dropdown = gr.Dropdown(
194
- choices=list(MODELS.keys()), value="Mistral 7B (Fast & Reliable)", label="πŸ€– AI Model"
195
- )
196
-
197
  generate_button = gr.Button("πŸš€ Generate Meme", variant="primary", size="lg")
198
 
199
  with gr.Column(scale=1):
200
  gr.Markdown("""
201
  ### πŸ“– How to Use
202
 
203
- 1. **Enter Your Idea:** Describe the meme.
204
- 2. **Pick Template:** Choose a meme format.
205
- 3. **Choose AI:** Mistral is fast and reliable.
206
  4. **Click Generate!**
207
 
208
- **Setup (First Time Only):**
209
  - Get [HF Token](https://huggingface.co/settings/tokens)
210
  - Go to Settings β†’ Secrets
211
  - Add `HF_TOKEN`
 
3
  import os
4
  import requests
5
  import random
6
+ import re
7
 
8
  # --- CORE FUNCTIONS ---
9
 
 
21
  return None, None, error
22
 
23
  try:
24
+ # A simple, direct prompt for older models
25
+ prompt = f"<|prompter|>Create funny meme text for: \"{idea}\". Respond ONLY in the format:\nTOP: [Top text]\nBOTTOM: [Bottom text]<|endoftext|><|assistant|>"
 
 
 
 
 
 
 
 
 
 
 
26
 
27
  # Using text_generation which is compatible with these models
28
  response = client.text_generation(
29
  prompt,
30
  model=model,
31
  max_new_tokens=100,
32
+ temperature=0.9,
33
  return_full_text=False,
34
  do_sample=True,
35
+ stop_sequences=["<|", "\n\n"],
36
  )
37
 
38
  # --- Robust Parsing Logic ---
 
 
 
39
  top_text = ""
40
  bottom_text = ""
41
 
42
+ # Use regex to find TOP: and BOTTOM:
43
+ top_match = re.search(r"TOP:\s*(.*)", response, re.IGNORECASE)
44
+ bottom_match = re.search(r"BOTTOM:\s*(.*)", response, re.IGNORECASE)
45
+
46
+ if top_match:
47
+ top_text = top_match.group(1).strip().strip('[]"\'.,')
48
+ if bottom_match:
49
+ bottom_text = bottom_match.group(1).strip().strip('[]"\'.,')
50
 
51
  # Fallback if AI gives a weird response
52
  if not top_text or not bottom_text:
53
+ lines = response.strip().split('\n')
54
+ if len(lines) >= 2:
55
+ top_text = lines[0].replace("TOP:", "").strip()
56
+ bottom_text = lines[1].replace("BOTTOM:", "").strip()
57
+ else: # Final fallback
58
+ words = idea.split()
59
+ mid_point = max(1, len(words) // 2)
60
+ top_text = ' '.join(words[:mid_point])
61
+ bottom_text = ' '.join(words[mid_point:])
62
+
63
  if not top_text.strip(): top_text = "Top Text"
64
  if not bottom_text.strip(): bottom_text = "Bottom Text"
65
 
 
70
  if "rate limit" in error_msg:
71
  return None, None, "❌ **Rate Limit Exceeded**\n\nWait 60 seconds and try again. Free tier has limits."
72
  elif "503" in error_msg:
73
+ return None, None, f"❌ **Model is Loading**\n\nWait 30 seconds for the model to wake up and try again."
74
+ elif "404" in error_msg:
75
+ return None, None, f"❌ **Model Not Found**\n\nThe model '{model}' is not available on the free tier right now. This can change frequently."
76
  else:
77
  return None, None, f"❌ **AI Error:** {error_msg[:200]}"
78
 
 
90
  template_id = MEME_TEMPLATES.get(template, "181913649") # Default to Drake
91
  url = "https://api.imgflip.com/caption_image"
92
 
 
93
  payload = {
94
  'template_id': template_id,
95
  'username': 'imgflip_huggingface',
 
100
 
101
  try:
102
  response = requests.post(url, data=payload, timeout=15)
103
+ response.raise_for_status()
104
  data = response.json()
105
 
106
  if data.get('success'):
107
  meme_url = data['data']['url']
108
 
 
109
  img_response = requests.get(meme_url, timeout=15)
110
  if img_response.status_code == 200:
111
  temp_path = f"/tmp/meme_{random.randint(1000, 9999)}.jpg"
 
132
 
133
  # --- CONFIGURATION & UI ---
134
 
135
+ # βœ… STABLE & RELIABLE FREE MODELS
136
  MODELS = {
137
+ "OpenAssistant Pythia 12B (Stable)": "OpenAssistant/oasst-sft-4-pythia-12b-epoch-3.5",
138
+ "Zephyr 7B (Good Alternative)": "HuggingFaceH4/zephyr-7b-beta",
139
+ "Mistral 7B v0.2": "mistralai/Mistral-7B-Instruct-v0.2",
140
  }
141
 
142
  # Popular meme templates
 
153
 
154
  # Example prompts
155
  examples = [
156
+ ["When you finally fix the bug at 3 AM", "Success Kid", "OpenAssistant Pythia 12B (Stable)"],
157
+ ["Junior dev vs Senior dev looking at same error", "Drake", "Zephyr 7B (Good Alternative)"],
158
+ ["My code in development vs in production", "Distracted Boyfriend", "Mistral 7B v0.2"],
 
159
  ]
160
 
161
  # Gradio UI
 
165
  <div style='text-align: center; background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
166
  padding: 30px; border-radius: 15px; color: white; margin-bottom: 20px;'>
167
  <h1>πŸ₯Έ AI Meme Generator</h1>
168
+ <h3>Powered by Stable Free HuggingFace Models πŸ€—</h3>
169
  </div>
170
  """)
171
 
 
173
 
174
  with gr.Row():
175
  with gr.Column(scale=2):
176
+ idea_input = gr.Textbox(label="🎨 Your Meme Idea", placeholder="Example: When the client says 'just one small change'...", lines=2)
 
 
 
 
 
177
  with gr.Row():
178
+ template_dropdown = gr.Dropdown(choices=list(MEME_TEMPLATES.keys()), value="Drake", label="πŸ–ΌοΈ Meme Template")
179
+ model_dropdown = gr.Dropdown(choices=list(MODELS.keys()), value="OpenAssistant Pythia 12B (Stable)", label="πŸ€– AI Model (Free Tier)")
 
 
 
 
 
180
  generate_button = gr.Button("πŸš€ Generate Meme", variant="primary", size="lg")
181
 
182
  with gr.Column(scale=1):
183
  gr.Markdown("""
184
  ### πŸ“– How to Use
185
 
186
+ 1. **Enter Idea:** Describe the meme.
187
+ 2. **Pick Template:** Choose a format.
188
+ 3. **Choose AI:** OpenAssistant is very stable.
189
  4. **Click Generate!**
190
 
191
+ **Setup (First Time):**
192
  - Get [HF Token](https://huggingface.co/settings/tokens)
193
  - Go to Settings β†’ Secrets
194
  - Add `HF_TOKEN`