jsakshi committed on
Commit
9fb4d89
·
verified ·
1 Parent(s): fe2994b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +69 -96
app.py CHANGED
@@ -364,74 +364,69 @@ from dotenv import load_dotenv
364
  load_dotenv()
365
 
366
  # Hugging Face configuration
367
- HF_TOKEN = os.getenv("HF_TOKEN") # Make sure this matches your .env variable name
368
  TEXT_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
369
  IMAGE_API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
370
- HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"} # Correct header format
371
 
372
  def generate_blog_content(topic, tone="professional", length="medium"):
373
- current_date = datetime.now().strftime("%B %d, %Y")
374
- reading_time = {"short": "5-8", "medium": "8-12", "long": "15-20"}[length]
 
375
 
376
- # Proper Mistral instruction format
377
- prompt = f"""<s>[INST] Write a professional blog post about {topic} with this structure:
378
- - Title and subtitle
379
- - Introduction with statistics
380
- - 2 main sections with subsections
381
- - Conclusion
382
- - Markdown formatting
383
- - Published date: {current_date}
384
- - Reading time: {reading_time} minutes
385
- Use {tone} tone [/INST]</s>"""
386
 
387
- payload = {
388
- "inputs": prompt,
389
- "parameters": {
390
- "max_new_tokens": 1024,
391
- "temperature": 0.7,
392
- "return_full_text": False # Important to exclude original prompt
 
393
  }
394
- }
395
 
396
- try:
397
  response = requests.post(TEXT_API_URL, headers=HEADERS, json=payload)
398
 
399
  if response.status_code == 503:
400
- # Handle model loading
401
- estimate = response.json()['estimated_time']
402
  time.sleep(estimate)
403
  response = requests.post(TEXT_API_URL, headers=HEADERS, json=payload)
404
 
405
  response.raise_for_status()
406
-
407
- result = response.json()
408
- return result[0]['generated_text']
409
 
410
  except Exception as e:
411
- return f"Error: {str(e)} - {response.text if 'response' in locals() else ''}"
412
-
413
 
414
  def generate_featured_image(topic):
415
- prompt = f"Professional digital illustration for blog about {topic}, high quality, trending on artstation"
416
-
417
- payload = {
418
- "inputs": prompt,
419
- "parameters": {
420
- "height": 512,
421
- "width": 768,
422
- "num_inference_steps": 25
 
423
  }
424
- }
425
 
426
- try:
427
  response = requests.post(IMAGE_API_URL, headers=HEADERS, json=payload)
428
  response.raise_for_status()
 
429
  image = Image.open(BytesIO(response.content))
430
  temp_img = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
431
  image.save(temp_img.name)
432
- return temp_img.name, "Image generated successfully"
 
433
  except Exception as e:
434
- return None, f"Error generating image: {str(e)}"
435
 
436
  def create_download_file(content, title, author):
437
  try:
@@ -441,101 +436,79 @@ def create_download_file(content, title, author):
441
  temp_file.close()
442
 
443
  sanitized_title = re.sub(r'[^\w\-_ ]', '_', title)[:50]
444
- new_filename = f"{sanitized_title}.md"
445
- return temp_file.name, new_filename
446
  except Exception as e:
447
- return None, f"Error creating file: {str(e)}"
448
 
449
- def generate_blog(topic, tone, length, author_name, publish_option, linkedin_username=None, linkedin_password=None, hf_token=None):
450
  status_updates = []
 
 
451
  file_path = None
452
- download_filename = None
453
 
454
  try:
 
455
  status_updates.append("🚀 Starting blog generation...")
456
  blog_content = generate_blog_content(topic, tone, length)
457
 
458
  if "Error" in blog_content:
459
  return blog_content, "", "\n".join(status_updates), None
460
 
 
461
  title_match = re.search(r'^#\s+(.+)$', blog_content, re.MULTILINE)
462
- title = title_match.group(1) if title_match else topic
463
 
 
464
  status_updates.append("🖼️ Generating featured image...")
465
- image_path, image_msg = generate_featured_image(topic)
466
- status_updates.append(image_msg)
 
 
 
467
 
 
468
  status_updates.append("📥 Preparing download...")
469
- file_path, download_filename = create_download_file(blog_content, title, author_name)
470
  status_updates.append("✅ Download ready!")
471
 
472
- if publish_option != "none":
473
- status_updates.append(f"📮 Publishing to {publish_option}...")
474
- # Add publishing logic here
475
 
476
- return blog_content, title, "\n".join(status_updates), (file_path, download_filename) if file_path else None
477
-
478
  except Exception as e:
479
- status_updates.append(f"❌ Error: {str(e)}")
480
- return blog_content, title if 'title' in locals() else "", "\n".join(status_updates), None
481
 
 
482
  with gr.Blocks(title="AI Blog Generator", theme=gr.themes.Soft()) as app:
483
- gr.Markdown("# 📝 AI Blog Generator & Publisher")
484
- gr.Markdown("Generate professional blog posts with AI and export as Markdown")
485
 
486
  with gr.Row():
487
  with gr.Column(scale=1):
488
- topic_input = gr.Textbox(label="Blog Topic", placeholder="Enter your blog topic...")
489
  tone_input = gr.Dropdown(
 
490
  label="Writing Style",
491
- choices=["professional", "casual", "technical", "storytelling"],
492
  value="professional"
493
  )
494
  length_input = gr.Dropdown(
 
495
  label="Article Length",
496
- choices=["short", "medium", "long"],
497
  value="medium"
498
  )
499
- author_input = gr.Textbox(label="Author Name", placeholder="John Doe")
500
-
501
- with gr.Accordion("⚙️ Publishing Options", open=False):
502
- publish_option = gr.Radio(
503
- label="Publish Destination",
504
- choices=["none", "linkedin", "huggingface", "both"],
505
- value="none"
506
- )
507
- linkedin_user = gr.Textbox(label="LinkedIn Email", visible=False)
508
- linkedin_pass = gr.Textbox(label="LinkedIn Password", type="password", visible=False)
509
- hf_token = gr.Textbox(label="Hugging Face Token", type="password", visible=False)
510
-
511
- def update_publish_fields(choice):
512
- linkedin_vis = choice in ["linkedin", "both"]
513
- hf_vis = choice in ["huggingface", "both"]
514
- return [
515
- gr.update(visible=linkedin_vis),
516
- gr.update(visible=linkedin_vis),
517
- gr.update(visible=hf_vis)
518
- ]
519
- publish_option.change(update_publish_fields, publish_option, [linkedin_user, linkedin_pass, hf_token])
520
-
521
- generate_btn = gr.Button("Generate Blog", variant="primary")
522
 
523
  with gr.Column(scale=2):
524
  title_output = gr.Textbox(label="Generated Title")
525
- blog_output = gr.Markdown(label="Blog Content", show_label=False)
526
- status_output = gr.Textbox(label="Status", interactive=False)
527
- download_output = gr.File(
528
- label="Download Blog",
529
- visible=False,
530
- file_types=[".md"],
531
- file_count="single"
532
- )
533
-
534
  generate_btn.click(
535
  generate_blog,
536
- inputs=[topic_input, tone_input, length_input, author_input, publish_option, linkedin_user, linkedin_pass, hf_token],
537
  outputs=[blog_output, title_output, status_output, download_output]
538
  )
539
 
540
  if __name__ == "__main__":
541
- app.launch()
 
364
  load_dotenv()
365
 
366
  # Hugging Face configuration
367
+ HF_TOKEN = os.getenv("HF_TOKEN")
368
  TEXT_API_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2"
369
  IMAGE_API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-xl-base-1.0"
370
+ HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
371
 
372
  def generate_blog_content(topic, tone="professional", length="medium"):
373
+ try:
374
+ current_date = datetime.now().strftime("%B %d, %Y")
375
+ reading_time = {"short": "5-8", "medium": "8-12", "long": "15-20"}[length]
376
 
377
+ prompt = f"""<s>[INST] Write a {tone} blog post about {topic} with:
378
+ - Title and subtitle
379
+ - Introduction with statistics
380
+ - 2 main sections with subsections
381
+ - Conclusion
382
+ - Markdown formatting
383
+ - Published date: {current_date}
384
+ - Reading time: {reading_time} minutes [/INST]</s>"""
 
 
385
 
386
+ payload = {
387
+ "inputs": prompt,
388
+ "parameters": {
389
+ "max_new_tokens": 1024,
390
+ "temperature": 0.7,
391
+ "return_full_text": False
392
+ }
393
  }
 
394
 
 
395
  response = requests.post(TEXT_API_URL, headers=HEADERS, json=payload)
396
 
397
  if response.status_code == 503:
398
+ estimate = response.json().get('estimated_time', 30)
 
399
  time.sleep(estimate)
400
  response = requests.post(TEXT_API_URL, headers=HEADERS, json=payload)
401
 
402
  response.raise_for_status()
403
+ return response.json()[0]['generated_text']
 
 
404
 
405
  except Exception as e:
406
+ return f"Error generating content: {str(e)}"
 
407
 
408
  def generate_featured_image(topic):
409
+ try:
410
+ prompt = f"Professional digital illustration for blog about {topic}, high quality"
411
+ payload = {
412
+ "inputs": prompt,
413
+ "parameters": {
414
+ "height": 512,
415
+ "width": 768,
416
+ "num_inference_steps": 25
417
+ }
418
  }
 
419
 
 
420
  response = requests.post(IMAGE_API_URL, headers=HEADERS, json=payload)
421
  response.raise_for_status()
422
+
423
  image = Image.open(BytesIO(response.content))
424
  temp_img = tempfile.NamedTemporaryFile(delete=False, suffix=".png")
425
  image.save(temp_img.name)
426
+ return temp_img.name, None # Return path and no error
427
+
428
  except Exception as e:
429
+ return None, f"Image error: {str(e)}"
430
 
431
  def create_download_file(content, title, author):
432
  try:
 
436
  temp_file.close()
437
 
438
  sanitized_title = re.sub(r'[^\w\-_ ]', '_', title)[:50]
439
+ return temp_file.name # Return only the file path
440
+
441
  except Exception as e:
442
+ return None
443
 
444
+ def generate_blog(topic, tone, length, author_name, publish_option, linkedin_user=None, linkedin_pass=None, hf_token=None):
445
  status_updates = []
446
+ blog_content = ""
447
+ title = ""
448
  file_path = None
 
449
 
450
  try:
451
+ # Generate content
452
  status_updates.append("🚀 Starting blog generation...")
453
  blog_content = generate_blog_content(topic, tone, length)
454
 
455
  if "Error" in blog_content:
456
  return blog_content, "", "\n".join(status_updates), None
457
 
458
+ # Extract title
459
  title_match = re.search(r'^#\s+(.+)$', blog_content, re.MULTILINE)
460
+ title = title_match.group(1).strip() if title_match else topic
461
 
462
+ # Generate image
463
  status_updates.append("🖼️ Generating featured image...")
464
+ image_path, image_error = generate_featured_image(topic)
465
+ if image_error:
466
+ status_updates.append(image_error)
467
+ else:
468
+ status_updates.append("✅ Image generated!")
469
 
470
+ # Create downloadable file
471
  status_updates.append("📥 Preparing download...")
472
+ file_path = create_download_file(blog_content, title, author_name)
473
  status_updates.append("✅ Download ready!")
474
 
475
+ return blog_content, title, "\n".join(status_updates), file_path
 
 
476
 
 
 
477
  except Exception as e:
478
+ status_updates.append(f"❌ Critical error: {str(e)}")
479
+ return blog_content, title, "\n".join(status_updates), None
480
 
481
+ # Gradio interface
482
  with gr.Blocks(title="AI Blog Generator", theme=gr.themes.Soft()) as app:
483
+ gr.Markdown("# 📝 AI Blog Generator")
 
484
 
485
  with gr.Row():
486
  with gr.Column(scale=1):
487
+ topic_input = gr.Textbox(label="Blog Topic")
488
  tone_input = gr.Dropdown(
489
+ ["professional", "casual", "technical", "storytelling"],
490
  label="Writing Style",
 
491
  value="professional"
492
  )
493
  length_input = gr.Dropdown(
494
+ ["short", "medium", "long"],
495
  label="Article Length",
 
496
  value="medium"
497
  )
498
+ author_input = gr.Textbox(label="Author Name")
499
+ generate_btn = gr.Button("Generate", variant="primary")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
500
 
501
  with gr.Column(scale=2):
502
  title_output = gr.Textbox(label="Generated Title")
503
+ blog_output = gr.Markdown()
504
+ status_output = gr.Textbox(label="Status")
505
+ download_output = gr.File(label="Download")
506
+
 
 
 
 
 
507
  generate_btn.click(
508
  generate_blog,
509
+ inputs=[topic_input, tone_input, length_input, author_input, gr.Radio(["none"], visible=False)],
510
  outputs=[blog_output, title_output, status_output, download_output]
511
  )
512
 
513
  if __name__ == "__main__":
514
+ app.launch(share=False)