File size: 3,076 Bytes
8b07697
 
7244d4f
8b07697
ea77eb1
61b1afa
8b07697
61b1afa
8b07697
61b1afa
 
8b07697
 
61b1afa
 
8b07697
61b1afa
 
 
8b07697
 
61b1afa
 
7244d4f
61b1afa
 
 
3b57b61
 
61b1afa
 
 
 
 
 
 
 
 
 
 
 
7244d4f
61b1afa
8b07697
61b1afa
3b57b61
61b1afa
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
from time import sleep
from urllib.parse import quote_plus

import requests
from bs4 import BeautifulSoup
from transformers import pipeline

# Load the model once at import time so every call reuses the same pipeline
# instance; on failure hf_pipe is left as a None sentinel that safe_invoke
# checks before use.
try:
    # NOTE(review): temperature normally only takes effect when sampling is
    # enabled (do_sample=True) -- confirm this kwarg actually changes output.
    hf_pipe = pipeline("text2text-generation", model="google/flan-t5-small", max_length=512, temperature=0.7)
except Exception as e:
    print(f"Error loading model: {e}")
    hf_pipe = None

def safe_invoke(prompt: str) -> str:
    """Run *prompt* through the shared pipeline without ever raising.

    Returns the generated text, or a sentinel string on failure:
    "Error: Model not loaded" when the pipeline never initialised,
    "Error" when generation raises or returns an unexpected shape.
    """
    # The load-failure sentinel is None, so test identity explicitly
    # rather than relying on the truthiness of a pipeline object.
    if hf_pipe is None:
        return "Error: Model not loaded"
    try:
        outputs = hf_pipe(prompt)
        # HF pipelines return a list of dicts with a 'generated_text' key.
        if outputs and isinstance(outputs, list):
            return outputs[0]['generated_text'].strip()
        return "Error"
    except Exception as e:  # broad by design: callers expect a string, never a raise
        print(f"Error during generation: {e}")
        return "Error"

def scrape_startpage(query: str, max_results: int = 3):
    """Fetch up to *max_results* Startpage search results for *query*.

    Retries up to 3 times with exponential backoff on any request or
    parse failure.

    Parameters
    ----------
    query : str
        Search terms; URL-encoded before being placed in the URL.
    max_results : int
        Maximum number of results to return (default 3).

    Returns
    -------
    list[str]
        "title: description" strings, or [] if every attempt fails.
    """
    # quote_plus escapes reserved characters ('&', '=', '#', unicode, ...)
    # and encodes spaces as '+'; the previous query.replace(' ', '+')
    # produced broken URLs for any query containing a reserved character.
    url = f"https://www.startpage.com/sp/search?query={quote_plus(query)}"
    headers = {"User-Agent": "Mozilla/5.0"}
    for attempt in range(3):
        try:
            res = requests.get(url, headers=headers, timeout=10)
            res.raise_for_status()
            soup = BeautifulSoup(res.text, "html.parser")
            results = []
            # Startpage wraps each hit in a div with class "result".
            for r in soup.find_all("div", class_="result")[:max_results]:
                title = r.find("h3")
                desc = r.find("p", class_="desc")
                title_text = title.get_text(strip=True) if title else "No title"
                desc_text = desc.get_text(strip=True) if desc else "No description"
                results.append(f"{title_text}: {desc_text}")
            return results
        except Exception as e:  # best-effort scrape: log, back off, retry
            print(f"Scrape error (attempt {attempt+1}): {e}")
            if attempt < 2:  # no point sleeping after the final attempt
                sleep(2 ** attempt)
    return []

def generate_post(topic, platform, search_results):
    """Draft a professional, platform-specific post about *topic*.

    *search_results* is a pre-joined string of scraped context that is
    embedded verbatim into the prompt.  Returns the model's output (or
    an error sentinel propagated from safe_invoke).
    """
    prompt = (
        f'You are a social media expert. Write a professional {platform} post about "{topic}".\n'
        f"Use this information to help you: {search_results}\n"
        "Make the post clear, engaging, and suitable for corporate clients.\n"
        "Output only the post text."
    )
    return safe_invoke(prompt)

def score_post(post, platform, score_type):
    """Ask the model to rate *post* on *score_type* (1-10, bare number)."""
    rubric = (
        f"Rate the following post on {score_type} from 1 to 10 (just give a number):\n"
        f"Platform: {platform}\n"
        f"Post: {post}\n"
    )
    return safe_invoke(rubric)

def workflow(topic, platform):
    """End-to-end pipeline: scrape context, draft a post, score it.

    Returns a 4-tuple (post, engagement, tone, clarity).  Any failure in
    generation or score validation substitutes "Error" sentinels while
    keeping the tuple shape.
    """
    # Step 1: gather web context for the prompt.
    snippets = scrape_startpage(topic)
    context = " | ".join(snippets) if snippets else "No additional info."

    # Step 2: draft the post; bail out early if generation failed.
    post = generate_post(topic, platform, context)
    if post == "Error":
        return post, "Error", "Error", "Error"

    # Step 3: score on each dimension, in fixed order.
    scores = [
        score_post(post, platform, dimension)
        for dimension in ("engagement", "tone", "clarity")
    ]

    # A score is valid only if it is a bare (whitespace-trimmed) integer.
    if any(not (s and s.strip().isdigit()) for s in scores):
        return post, "Error", "Error", "Error"

    return (post, *scores)