# NOTE(review): removed stray paste artifacts ("Spaces:" / "Runtime error")
# that were not Python and would have crashed the module at import time.
# app.py
import os

import streamlit as st
import requests
from bs4 import BeautifulSoup
from openai import OpenAI  # New-style OpenAI client, pointed at Groq's endpoint

# Configure the Groq API via its OpenAI-compatible interface.
# SECURITY: the API key was previously hard-coded in this file; read it from
# the environment instead so the secret never lands in version control.
client = OpenAI(
    api_key=os.environ.get("GROQ_API_KEY", ""),
    base_url="https://api.groq.com/openai/v1",
)
def extract_text_from_url(url):
    """Fetch a web page and return the concatenated text of its <p> tags.

    Args:
        url: The page URL to download.

    Returns:
        The paragraph text joined with single spaces, or an
        "Error fetching ..." string on failure (callers display this
        string rather than crashing, so the best-effort contract is kept).
    """
    try:
        # Timeout prevents the Streamlit spinner from hanging forever on an
        # unresponsive host; raise_for_status keeps 4xx/5xx error pages from
        # being silently scraped as if they were blog content.
        response = requests.get(url, timeout=15)
        response.raise_for_status()
        soup = BeautifulSoup(response.text, 'html.parser')
        return ' '.join(p.get_text() for p in soup.find_all('p'))
    except Exception as e:
        return f"Error fetching {url}: {e}"
def generate_blog(content, keywords):
    """Ask the Groq-hosted LLaMA 3 model to write a new SEO blog post.

    Args:
        content: Combined source text scraped from the input URLs.
        keywords: Keywords the model should weave into the post.

    Returns:
        The generated blog post text.
    """
    keyword_list = ', '.join(keywords)
    prompt = f"""
You are a professional SEO blog writer.
Based on the following combined content, generate a completely new, attractive, and SEO-optimized blog.
Please naturally incorporate the following keywords: {keyword_list}.
Content:
{content}
Write the new blog post:
"""
    chat_messages = [
        {"role": "system", "content": "You are an expert SEO content writer."},
        {"role": "user", "content": prompt},
    ]
    completion = client.chat.completions.create(
        model="llama3-70b-8192",  # LLaMA 3 via Groq
        messages=chat_messages,
        temperature=0.7,
        max_tokens=1500,
    )
    return completion.choices[0].message.content
def main():
    """Render the Streamlit UI: collect three blog URLs and target keywords,
    then scrape, merge, and rewrite the content as a fresh SEO blog post."""
    st.title("📝 Pro SEO Blog Writer")

    st.subheader("Enter three blog URLs:")
    urls = [st.text_input(f"Blog URL {i}") for i in (1, 2, 3)]

    st.subheader("Enter Target Keywords (comma separated):")
    keywords_input = st.text_input("Example: AI, machine learning, future technology")

    if st.button("Generate New Blog"):
        # Every URL field and the keyword field must be non-empty.
        if all(urls) and keywords_input:
            with st.spinner("Extracting content and generating blog..."):
                combined_content = "\n\n".join(
                    extract_text_from_url(u) for u in urls
                )
                keywords = [kw.strip() for kw in keywords_input.split(",")]
                new_blog = generate_blog(combined_content, keywords)
                st.success("✅ Blog generated successfully!")
                st.subheader("Generated Blog:")
                st.write(new_blog)
                st.download_button(
                    "Download Blog as TXT",
                    data=new_blog,
                    file_name="seo_blog.txt",
                )
        else:
            st.warning("Please fill in all fields.")


if __name__ == "__main__":
    main()